<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom" xmlns:dc="http://purl.org/dc/elements/1.1/">
  <channel>
    <title>DEV Community: Krishna Kurtakoti</title>
    <description>The latest articles on DEV Community by Krishna Kurtakoti (@krishnakurtakoti).</description>
    <link>https://dev.to/krishnakurtakoti</link>
    <image>
      <url>https://media2.dev.to/dynamic/image/width=90,height=90,fit=cover,gravity=auto,format=auto/https:%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F376617%2Fa80c611f-bcdc-47de-a558-eff950ef49a0.png</url>
      <title>DEV Community: Krishna Kurtakoti</title>
      <link>https://dev.to/krishnakurtakoti</link>
    </image>
    <atom:link rel="self" type="application/rss+xml" href="https://dev.to/feed/krishnakurtakoti"/>
    <language>en</language>
    <item>
      <title>Data for World Map Panel Plugin in Grafana from MySQL</title>
      <dc:creator>Krishna Kurtakoti</dc:creator>
      <pubDate>Tue, 28 May 2024 10:09:07 +0000</pubDate>
      <link>https://dev.to/krishnakurtakoti/data-for-world-map-panel-plugin-in-grafana-from-mysql-5ggh</link>
      <guid>https://dev.to/krishnakurtakoti/data-for-world-map-panel-plugin-in-grafana-from-mysql-5ggh</guid>
      <description>&lt;p&gt;Data for World Map Panel Plugin in Grafana:&lt;/p&gt;

&lt;p&gt;Approach 1:&lt;/p&gt;

&lt;p&gt;Using the InfluxDB data:&lt;/p&gt;

&lt;ol&gt;
&lt;li&gt;Here, the data is pulled from the database(InfluxDB) from a single measurement(table).&lt;/li&gt;
&lt;/ol&gt;

&lt;p&gt;Schema of the hdb7 table is:&lt;/p&gt;

&lt;div class="table-wrapper-paragraph"&gt;&lt;table&gt;
&lt;thead&gt;
&lt;tr&gt;
&lt;th&gt;Time&lt;/th&gt;
&lt;th&gt;DS_ID&lt;/th&gt;
&lt;th&gt;from&lt;/th&gt;
&lt;th&gt;graphStatement&lt;/th&gt;
&lt;th&gt;latitude&lt;/th&gt;
&lt;th&gt;longitude&lt;/th&gt;
&lt;th&gt;to&lt;/th&gt;
&lt;th&gt;totalPower&lt;/th&gt;
&lt;th&gt;value&lt;/th&gt;
&lt;/tr&gt;
&lt;/thead&gt;
&lt;tbody&gt;
&lt;/tbody&gt;
&lt;/table&gt;&lt;/div&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fgr9jj8p1i1u2rrpkw8gt.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fgr9jj8p1i1u2rrpkw8gt.png" alt="Screenshot from 2021-01-07 13-34-28" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Faud0667vmkxjztlp6qli.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Faud0667vmkxjztlp6qli.png" alt="a1.png" width="800" height="100"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fzmp9jo9kdbo8komsd0of.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Farticles%2Fzmp9jo9kdbo8komsd0of.png" alt="a2.png" width="800" height="198"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;The above is the data that is pushed to the measurement(hdb7) of the InfluxDB database. Here, the hdb7 table’s graphStatement field values will be shown on the world map.&lt;/p&gt;

&lt;p&gt;Query: &lt;/p&gt;

&lt;p&gt;SELECT * from "hdb7" GROUP BY * ORDER BY time DESC LIMIT 4&lt;/p&gt;

&lt;div class="table-wrapper-paragraph"&gt;&lt;table&gt;
&lt;thead&gt;
&lt;tr&gt;
&lt;th&gt;col1&lt;/th&gt;
&lt;th&gt;col2&lt;/th&gt;
&lt;/tr&gt;
&lt;/thead&gt;
&lt;tbody&gt;
&lt;tr&gt;
&lt;td&gt;Location Data&lt;/td&gt;
&lt;td&gt;table&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;Aggregation&lt;/td&gt;
&lt;td&gt;current&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;&lt;/div&gt;

&lt;p&gt;Map Data Options&lt;/p&gt;

&lt;p&gt;Field Mapping&lt;/p&gt;

&lt;div class="table-wrapper-paragraph"&gt;&lt;table&gt;
&lt;thead&gt;
&lt;tr&gt;
&lt;th&gt;col1&lt;/th&gt;
&lt;th&gt;col2&lt;/th&gt;
&lt;/tr&gt;
&lt;/thead&gt;
&lt;tbody&gt;
&lt;tr&gt;
&lt;td&gt;Table Query Format&lt;/td&gt;
&lt;td&gt;coordinates&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;Location Name Field&lt;/td&gt;
&lt;td&gt;graphStatement&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;Metric Field&lt;/td&gt;
&lt;td&gt;value&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;Latitude Field&lt;/td&gt;
&lt;td&gt;latitude&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;Longitude Field&lt;/td&gt;
&lt;td&gt;longitude&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;&lt;/div&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fbyqhoevj4c9j6m0xn3tj.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fbyqhoevj4c9j6m0xn3tj.png" alt="Screenshot from 2021-01-05 16-43-23p1d1" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fraq6rumu907qwas7fqav.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fraq6rumu907qwas7fqav.png" alt="Screenshot from 2021-01-05 16-44-00p1d2" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Field Mapping&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fbvrwz3ali445j0zzny12.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fbvrwz3ali445j0zzny12.png" alt="Screenshot from 2021-01-05 16-44-09p1d3" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Drawback:&lt;/p&gt;

&lt;p&gt;1.&lt;/p&gt;

&lt;p&gt;The requirement specifically mentions having 2 tables like shown below.&lt;br&gt;
    JOIN statements are not supported in InfluxDB.&lt;/p&gt;

&lt;p&gt;Table1&lt;br&gt;
DS_ID, Lat, Long&lt;/p&gt;

&lt;p&gt;Table2&lt;br&gt;
DS_ID, Sum(pump),sum(light),..total value&lt;/p&gt;

&lt;p&gt;Table1:&lt;/p&gt;

&lt;div class="table-wrapper-paragraph"&gt;&lt;table&gt;
&lt;thead&gt;
&lt;tr&gt;
&lt;th&gt;DS_ID&lt;/th&gt;
&lt;th&gt;latitude&lt;/th&gt;
&lt;th&gt;longitude&lt;/th&gt;
&lt;th&gt;value&lt;/th&gt;
&lt;th&gt;time&lt;/th&gt;
&lt;/tr&gt;
&lt;/thead&gt;
&lt;tbody&gt;
&lt;/tbody&gt;
&lt;/table&gt;&lt;/div&gt;

&lt;p&gt;Table2:&lt;/p&gt;

&lt;div class="table-wrapper-paragraph"&gt;&lt;table&gt;
&lt;thead&gt;
&lt;tr&gt;
&lt;th&gt;DS_ID&lt;/th&gt;
&lt;th&gt;graphStatement&lt;/th&gt;
&lt;th&gt;totalPower&lt;/th&gt;
&lt;th&gt;value&lt;/th&gt;
&lt;th&gt;time&lt;/th&gt;
&lt;/tr&gt;
&lt;/thead&gt;
&lt;tbody&gt;
&lt;/tbody&gt;
&lt;/table&gt;&lt;/div&gt;

&lt;p&gt;The graphStatement covers the fields sum(boosterPump),  sum(lighting), sum(lift) which are to be displayed on the map.&lt;/p&gt;

&lt;p&gt;“JOIN is no longer a concept in 0.9. Series sharing a measurement can be queried together simply by omitting the differentiating tags from the WHERE clause.”&lt;br&gt;
Link:&lt;br&gt;
&lt;a href="https://github.com/influxdata/influxdb/issues/624" rel="noopener noreferrer"&gt;https://github.com/influxdata/influxdb/issues/624&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;However, there is one alternative called transformations in Grafana that can be used to join 2 tables of same datasource/mixed datasource as shown below.&lt;/p&gt;

&lt;p&gt;Here, the data for Table1 (DS_ID, Lat, Long) will be queried from MySQL database. The data for Table2 (DS_ID, Sum(pump),sum(light),..total value) will be queried from InfluxDB database.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Implementation:&lt;/strong&gt;&lt;/p&gt;

&lt;ol&gt;
&lt;li&gt;
&lt;p&gt;We can implement the above by choosing the datasource as Mixed.&lt;br&gt;
 The query 1 will be:&lt;br&gt;
 &lt;strong&gt;SELECT * from hdb10&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;The query 2 will be:&lt;br&gt;
 &lt;strong&gt;SELECT * from hdb9&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;The transform we are going to apply is : &lt;br&gt;
&lt;strong&gt;Outer join: DS_ID&lt;/strong&gt;&lt;/p&gt;
&lt;/li&gt;
&lt;/ol&gt;

&lt;p&gt;All the pictures are shown below:&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F8b5h21f0fsb7t2a0lozt.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F8b5h21f0fsb7t2a0lozt.png" alt="Screenshot from 2021-01-08 15-07-09p2d1" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F4loxdrpw5tezt2ulklv6.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F4loxdrpw5tezt2ulklv6.png" alt="Screenshot from 2021-01-08 15-07-28p2d2" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;When we change the metric field to Total(field from hdb10) we get the value on the map as shown below:&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fgznqj96ntgiy2h6qm95t.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fgznqj96ntgiy2h6qm95t.png" alt="Screenshot from 2021-01-08 15-15-02p2d3" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Approach 2:&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Using the MySQL data:&lt;/p&gt;

&lt;ol&gt;
&lt;li&gt;Here, the data is pulled from the database (MySQL) from 2 tables.&lt;/li&gt;
&lt;/ol&gt;


&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    k@k-Lenovo-G50-70:~$  mysql -u root -p
Enter password: 
Welcome to the MySQL monitor.  Commands end with ; or \g.
Your MySQL connection id is 7

mysql&amp;gt; show databases;
+--------------------+
| Database           |
+--------------------+
| information_schema |
| database_name      |
| mysql              |
| performance_schema |
| sys                |
+--------------------+
5 rows in set (0.00 sec)

mysql&amp;gt; use database_name
Reading table information for completion of table and column names
You can turn off this feature to get a quicker startup with -A

Database changed
mysql&amp;gt; show tables;
+-------------------------+
| Tables_in_database_name |
+-------------------------+
| a_one                   |
| total_power             |
| worldmap_latlng         |
| worldmap_latlng_a       |
+-------------------------+
4 rows in set (0.00 sec)

Table creation:
mysql&amp;gt;  INSERT INTO worldmap_latlng_a
    -&amp;gt; (lat,
    -&amp;gt;  lng,
    -&amp;gt;  DS_ID,
    -&amp;gt; value,
    -&amp;gt; timestamp)
    -&amp;gt; VALUES
    -&amp;gt; (1.3521,
    -&amp;gt;  103.8198,
    -&amp;gt; '0',
    -&amp;gt; 1.0,
    -&amp;gt; now());
Query OK, 1 row affected (0.13 sec)

mysql&amp;gt; CREATE TABLE worldmap_latlng_a (
    -&amp;gt;   id int(11) NOT NULL AUTO_INCREMENT,
    -&amp;gt;   lat FLOAT NOT NULL,
    -&amp;gt;   lng FLOAT NOT NULL,
    -&amp;gt;   DS_ID VARCHAR(20) NOT NULL,
    -&amp;gt;   value FLOAT NOT NULL,
    -&amp;gt;   timestamp timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
    -&amp;gt;   PRIMARY KEY (id)
    -&amp;gt;  ) AUTO_INCREMENT=7 DEFAULT CHARSET=latin1;
Query OK, 0 rows affected (0.65 sec)

mysql&amp;gt; describe total_power;
+-------------+-------------+------+-----+-------------------+-----------------------------+
| Field       | Type        | Null | Key | Default           | Extra                       |
+-------------+-------------+------+-----+-------------------+-----------------------------+
| id          | int(11)     | NO   | PRI | NULL              | auto_increment              |
| Lift        | float       | NO   |     | NULL              |                             |
| Lighting    | float       | NO   |     | NULL              |                             |
| Total       | float       | NO   |     | NULL              |                             |
| BoosterPump | float       | NO   |     | NULL              |                             |
| DS_ID       | varchar(20) | NO   |     | NULL              |                             |
| value       | float       | NO   |     | NULL              |                             |
| timestamp   | timestamp   | NO   |     | CURRENT_TIMESTAMP | on update CURRENT_TIMESTAMP |
+-------------+-------------+------+-----+-------------------+-----------------------------+
8 rows in set (0.00 sec)

mysql&amp;gt; describe worldmap_latlng_a;
+-----------+-------------+------+-----+-------------------+-----------------------------+
| Field     | Type        | Null | Key | Default           | Extra                       |
+-----------+-------------+------+-----+-------------------+-----------------------------+
| id        | int(11)     | NO   | PRI | NULL              | auto_increment              |
| lat       | float       | NO   |     | NULL              |                             |
| lng       | float       | NO   |     | NULL              |                             |
| DS_ID     | varchar(20) | NO   |     | NULL              |                             |
| value     | float       | NO   |     | NULL              |                             |
| timestamp | timestamp   | NO   |     | CURRENT_TIMESTAMP | on update CURRENT_TIMESTAMP |
+-----------+-------------+------+-----+-------------------+-----------------------------+
6 rows in set (0.00 sec)

Table1: worldmap_latlng_a
DS_ID
lat
lng
value
timestamp

Table2: total_power
DS_ID
Lift
Lighting
BoosterPump
Total
value
timestamp

INSERTING Records:

mysql&amp;gt;  INSERT INTO worldmap_latlng_a
    -&amp;gt; (lat,
    -&amp;gt;  lng,
    -&amp;gt;  DS_ID,
    -&amp;gt; value,
    -&amp;gt; timestamp)
    -&amp;gt; VALUES
    -&amp;gt; (1.3521,
    -&amp;gt;  103.8198,
    -&amp;gt; '0',
    -&amp;gt; 1.0,
    -&amp;gt; now());
Query OK, 1 row affected (0.13 sec)

mysql&amp;gt; INSERT INTO total_power
    -&amp;gt; (Lift,
    -&amp;gt; Lighting,
    -&amp;gt; Total,
    -&amp;gt; BoosterPump,
    -&amp;gt; DS_ID,
    -&amp;gt; value,
    -&amp;gt; timestamp)
    -&amp;gt; VALUES
    -&amp;gt; ( 10474.1997022,
    -&amp;gt; 8.97861111111,
    -&amp;gt; 14987.6236142,
    -&amp;gt; 4504.44530083,
    -&amp;gt;  '0',
    -&amp;gt;  1.0,
    -&amp;gt;  now());
Query OK, 1 row affected (0.11 sec)

  &lt;/code&gt;
&lt;/div&gt;


&lt;p&gt;DB Connection in Grafana(Add Data sources MySQL):&lt;br&gt;
Host localhost:3306&lt;br&gt;
Database database_name&lt;br&gt;
&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fy3v3zwnfrd5mp693xx5c.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fy3v3zwnfrd5mp693xx5c.png" alt="Screenshot from 2021-01-05 17-35-47" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;SELECT CONCAT( "-") AS Conca, worldmap_latlng_a.lat, worldmap_latlng_a.lng, worldmap_latlng_a.DS_ID, total_power.Total&lt;br&gt;
FROM total_power&lt;br&gt;
INNER JOIN worldmap_latlng_a&lt;br&gt;
ON worldmap_latlng_a.DS_ID = total_power.DS_ID&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fgwufg47ntytsla1ty47k.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fgwufg47ntytsla1ty47k.png" alt="Screenshot from 2021-01-05 17-09-46p1d4" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F99jqlbnldnmy3g1f4o06.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F99jqlbnldnmy3g1f4o06.png" alt="Screenshot from 2021-01-05 17-11-27p1d5" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;The data we get is:&lt;br&gt;
Conca    lat    lng    DS_ID    Total&lt;/p&gt;

&lt;ul&gt;
&lt;li&gt;       1.4    104        0    14988&lt;/li&gt;
&lt;/ul&gt;

&lt;p&gt;Here, we are getting both the data of the table1(worldmap_latlng_a) and the table2(total_power) from the join query executed above.&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fm3svffq75puzc2wh2g9d.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fm3svffq75puzc2wh2g9d.png" alt="Screenshot from 2021-01-05 17-11-54p1d6" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

</description>
      <category>mysql</category>
      <category>grafana</category>
      <category>sql</category>
      <category>visualization</category>
    </item>
    <item>
      <title>Database and model design - mongoose</title>
      <dc:creator>Krishna Kurtakoti</dc:creator>
      <pubDate>Thu, 16 Dec 2021 05:13:31 +0000</pubDate>
      <link>https://dev.to/krishnakurtakoti/database-and-model-design-mongoose-18oh</link>
      <guid>https://dev.to/krishnakurtakoti/database-and-model-design-mongoose-18oh</guid>
      <description>&lt;p&gt;A new user Alan Tang, on Stackoverflow posted a question regarding the top-level design of schemas for database. &lt;br&gt;
The link to the question is here: &lt;br&gt;
&lt;a href="https://stackoverflow.com/questions/70345194/node-js-model-with-inner-nested-array/70345760#70345760" rel="noopener noreferrer"&gt;https://stackoverflow.com/questions/70345194/node-js-model-with-inner-nested-array/70345760#70345760&lt;/a&gt;&lt;br&gt;
More detail information is below:&lt;/p&gt;

&lt;blockquote&gt;
&lt;p&gt;I have a question about my collection design.&lt;br&gt;
 Current design&lt;/p&gt;
&lt;/blockquote&gt;

&lt;p&gt;&lt;strong&gt;Code (Part-I):&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    

  &lt;/code&gt;
  &lt;code&gt;
    
const customerSchema = mongoose.Schema({
customer_name: {
    type: String
},
purchase_history: [{
    amount: {
        type: Number,
        default: 0
    },
    currency: {
        type: String,
        require: true
    },
    description: {
        type: String
    }
}],
......
});


  &lt;/code&gt;
&lt;/div&gt;


&lt;blockquote&gt;
&lt;p&gt;Every time, if a customer purchases a new item, it will push the history into the "purchase_history".&lt;br&gt;
The purpose of "purchase_history" is to let them check their own history.&lt;br&gt;
Is this a good idea? or If you have a good idea, please few free to share.&lt;br&gt;
Thank you&lt;/p&gt;
&lt;/blockquote&gt;

&lt;p&gt;I &lt;strong&gt;answered&lt;/strong&gt; to the above question like this:&lt;br&gt;
 I have created 2 different schemas, one for the &lt;strong&gt;customer&lt;/strong&gt; and another for the &lt;strong&gt;purchase&lt;/strong&gt;. Please follow the standard procedure to keep all the &lt;strong&gt;service&lt;/strong&gt; files, &lt;strong&gt;model&lt;/strong&gt; files, &lt;strong&gt;controller&lt;/strong&gt; files in &lt;strong&gt;separate&lt;/strong&gt; folders.&lt;br&gt;
The below is the &lt;strong&gt;customer model&lt;/strong&gt;.&lt;/p&gt;

&lt;p&gt;customer.model.js&lt;br&gt;
&lt;strong&gt;Code (Part-II):&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    

  &lt;/code&gt;
  &lt;code&gt;
    
const mongoose = require('mongoose');
let Schema = mongoose.Schema;

const CustomerSchema = mongoose.Schema({
  customer_name: {
    type: String,
  },
});

const customer = mongoose.model('customer', CustomerSchema);
module.exports = customer;

  &lt;/code&gt;
&lt;/div&gt;


&lt;p&gt;We have the &lt;strong&gt;purchase model&lt;/strong&gt; as:&lt;/p&gt;

&lt;p&gt;purchase.model.js&lt;br&gt;
&lt;strong&gt;Code (Part-III):&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    

  &lt;/code&gt;
  &lt;code&gt;
    
const mongoose = require('mongoose');
let Schema = mongoose.Schema;
const customer = require('./customer.model');

var purchaseSchema = new Schema(
  {
    customerId: { type: Schema.Types.ObjectId, ref: 'customer' },
    amount: {
      type: Number,
      default: 0,
    },
    currency: {
      type: String,
      required: true,
    },
    description: {
      type: String,
    },
  },
  { timestamps: true }
);

module.exports = mongoose.model('purchase', purchaseSchema);

  &lt;/code&gt;
&lt;/div&gt;


&lt;p&gt;Here, we can see the customer data is stored in customer collection and the purchase data stored in the purchase collection. Each purchase record has a reference field &lt;strong&gt;'customerId'&lt;/strong&gt; which is the &lt;strong&gt;customer's unique identifier&lt;/strong&gt;. This field is defined in the purchase model. The customer's purchase history can be &lt;strong&gt;fetched&lt;/strong&gt; by querying for the customerId field.&lt;br&gt;
We can create an api for fetching customer's purchases as:&lt;/p&gt;

&lt;p&gt;purchase.service.js&lt;br&gt;
&lt;strong&gt;Code (Part-IV):&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    

  &lt;/code&gt;
  &lt;code&gt;
    
const purchaseModel = require('./purchase.model');

module.exports.getByCustomerId = async (_customerId) =&amp;gt; {
  try {
    const purchaseList = await purchaseModel.find({
      customerId: _customerId,
    });
    return purchaseList;
  } catch (err) {
    throw err.message;
  }
};

  &lt;/code&gt;
&lt;/div&gt;


&lt;p&gt;&lt;strong&gt;DESIGN PRINCIPLE:&lt;/strong&gt;&lt;br&gt;
The design principle followed here is to &lt;strong&gt;avoid duplication&lt;/strong&gt; as suggested by a Senior developer. It is not &lt;strong&gt;good practice&lt;/strong&gt; to &lt;strong&gt;store&lt;/strong&gt; the &lt;strong&gt;same value&lt;/strong&gt; in &lt;strong&gt;different collections&lt;/strong&gt;, the purchase data being stored in the customer collection as in Alan’s schema.&lt;/p&gt;

</description>
      <category>javascript</category>
      <category>mongodb</category>
      <category>database</category>
      <category>mongoose</category>
    </item>
    <item>
      <title>Designing backend for performance by reducing DB calls.</title>
      <dc:creator>Krishna Kurtakoti</dc:creator>
      <pubDate>Mon, 13 Dec 2021 06:50:01 +0000</pubDate>
      <link>https://dev.to/krishnakurtakoti/optimizing-backend-performance-by-reducing-db-calls-1j5n</link>
      <guid>https://dev.to/krishnakurtakoti/optimizing-backend-performance-by-reducing-db-calls-1j5n</guid>
      <description>&lt;p&gt;We need to generate report based on the user’s activities for each day. A wallet for each user is created at the time of user registration. The user can top up his wallet and use his wallet to make purchases by shopping in the application’s platform. All these activities are accounted in the reports generated daily.&lt;br&gt;
&lt;strong&gt;Code (Part-I):&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    

  &lt;/code&gt;
  &lt;code&gt;
    
const userWalletService = require("../../services/shop/userWallet");

let from = reportGenerationStartDate(dd/mm/yyyy00:00:00)
let to = reportGenerationEndDate(dd/mm/yyyy23:59:59)

for(let user of users){
////fetch the user’s activities from the DB(database) 

const filter = {
    updatedAt: {
      $gte: from,
      $lt: to,
    },
   userId: userId
  };
  const projection = {};
  const option = {};
  const userWallet = await userWalletService.getAllUserWallets({
    filter,
    projection,
    option,
  });

logger.info(
    `Got all userWallet in ${dateRange} = ${userWallet}`
  );
 openingBalance = userWallet.walletBalance;
}

  &lt;/code&gt;
&lt;/div&gt;


&lt;p&gt;Here, we are fetching user’s wallet data based on the user’s Id. For &lt;strong&gt;each user&lt;/strong&gt;, we are making a &lt;strong&gt;DB call&lt;/strong&gt;. If the users count increases as the application usage increases, say to 1000 or 10000 users, we need to make 10000 calls to the DB to fetch each user’s information and use that to generate the report. This is not feasible or good way to fetch the data from database.&lt;/p&gt;

&lt;p&gt;From the above code, we can see that &lt;strong&gt;DB calls inside for loop&lt;/strong&gt; can make the database to become unresponsive (in our case MongoDB to send error – &lt;strong&gt;pool draining&lt;/strong&gt; and close the connection without processing the current DB calls being made).&lt;/p&gt;

&lt;p&gt;This is &lt;strong&gt;undesirable&lt;/strong&gt; and must be &lt;strong&gt;avoided&lt;/strong&gt;. The approach to follow as suggested by one of the Project Lead is to &lt;strong&gt;fetch all the data&lt;/strong&gt; that would be fetched when each call would be made separately inside the for loop.&lt;br&gt;
Once we have the data, we can do the &lt;strong&gt;processing, filtering operations&lt;/strong&gt; with the modern Javascript functions like filter(), find(), map(), etc.&lt;/p&gt;

&lt;p&gt;In any application, the DB calls can always be treated as a &lt;strong&gt;bottleneck&lt;/strong&gt; for processing the &lt;strong&gt;API requests&lt;/strong&gt;.&lt;br&gt;
Also, if the app is running in &lt;strong&gt;cloud&lt;/strong&gt;, we must know the &lt;strong&gt;processing power&lt;/strong&gt; will be &lt;strong&gt;huge&lt;/strong&gt;. Any &lt;strong&gt;large volumes&lt;/strong&gt; of &lt;strong&gt;data&lt;/strong&gt; is handled &lt;strong&gt;efficiently&lt;/strong&gt; by these apps running on &lt;strong&gt;cloud&lt;/strong&gt; due to &lt;strong&gt;automatic scaling&lt;/strong&gt; of the &lt;strong&gt;resources&lt;/strong&gt; when required.&lt;br&gt;
We have our application running on AWS cloud and the framework is &lt;strong&gt;serverless&lt;/strong&gt; with &lt;strong&gt;lambda functions&lt;/strong&gt; which have high processing power, can do computing operations on large set of data efficiently.&lt;/p&gt;

&lt;p&gt;We need to always consider DB calls made as having constraints on the database resources and should &lt;strong&gt;limit&lt;/strong&gt; the number of calls to the database.&lt;/p&gt;

&lt;p&gt;The above code is modified as shown:&lt;/p&gt;

&lt;p&gt;We declare an array containing all the userIds based on which the user’s activities can be fetched.&lt;br&gt;
We make a query like shown below.&lt;br&gt;
userId: { $in: userIds }&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Code (Part-II):&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    

  &lt;/code&gt;
  &lt;code&gt;
    
const userWalletService = require("../../services/shop/userWallet");

const filter = {
    updatedAt: {
      $gte: from,
      $lt: to,
    },
   userId: { $in: userIds }
  };
  const projection = {};
  const option = {};

const userWallets = await userWalletService.getAllUserWallets({
    filter,
    projection,
    option,
  });

logger.info(
    `Got all userWallets in ${dateRange} = ${userWallets.length}`
  );

for (let i = 0; i &amp;lt; users.length; i++) {
const userWallet = userWallets.find((userWallet) =&amp;gt; {
return userWallet.userId.toString() == users[i]._id.toString();
});
openingBalance = userWallet.walletBalance;
}

  &lt;/code&gt;
&lt;/div&gt;


</description>
      <category>javascript</category>
      <category>mongodb</category>
      <category>database</category>
      <category>serverless</category>
    </item>
    <item>
      <title>Grafana Dashboard:</title>
      <dc:creator>Krishna Kurtakoti</dc:creator>
      <pubDate>Sat, 13 Feb 2021 06:13:34 +0000</pubDate>
      <link>https://dev.to/krishnakurtakoti/grafana-dashboard-5f87</link>
      <guid>https://dev.to/krishnakurtakoti/grafana-dashboard-5f87</guid>
      <description>&lt;p&gt;In this post, we will see how to use different visualizations, like the simple graph, pie chart, world map panel in the grafana dashboard by writing queries in Influx query language and fetching data from the InfluxDB database which is a time-series database.&lt;br&gt;
a. Data Visualization&lt;br&gt;
b. Installing Plugins Docker Grafana&lt;br&gt;
c. Writing to the InfluxDB2.0&lt;/p&gt;

&lt;p&gt;LINE CHARTS:&lt;/p&gt;

&lt;p&gt;1.&lt;br&gt;
As we can see, we are showing the records for 2 different blocks, i.e, DS_Id = 0 and DS_Id = 1.The timestamp is the same for both the blocks, 1598918400, 1598918402, &lt;br&gt;
1598918403 ….. .The date for the above can be obtained from the link: &lt;a href="https://www.epochconverter.com/" rel="noopener noreferrer"&gt;https://www.epochconverter.com/&lt;/a&gt;&lt;br&gt;
&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F50psl0iwikvzv4yhsr6q.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F50psl0iwikvzv4yhsr6q.png" alt="Screenshot from 2020-12-21 12-37-51picEight" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;div class="table-wrapper-paragraph"&gt;&lt;table&gt;
&lt;thead&gt;
&lt;tr&gt;
&lt;th&gt;TIMESTAMP&lt;/th&gt;
&lt;th&gt;DS_Id&lt;/th&gt;
&lt;th&gt;POWER_A&lt;/th&gt;
&lt;th&gt;POWER_B&lt;/th&gt;
&lt;th&gt;POWER_C&lt;/th&gt;
&lt;/tr&gt;
&lt;/thead&gt;
&lt;tbody&gt;
&lt;tr&gt;
&lt;td&gt;1598918400&lt;/td&gt;
&lt;td&gt;0&lt;/td&gt;
&lt;td&gt;1403.421&lt;/td&gt;
&lt;td&gt;712.372&lt;/td&gt;
&lt;td&gt;1680.471&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;1598918402&lt;/td&gt;
&lt;td&gt;0&lt;/td&gt;
&lt;td&gt;1423.817&lt;/td&gt;
&lt;td&gt;731.249&lt;/td&gt;
&lt;td&gt;1680.658&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;1598918403&lt;/td&gt;
&lt;td&gt;0&lt;/td&gt;
&lt;td&gt;1444.172&lt;/td&gt;
&lt;td&gt;749.339&lt;/td&gt;
&lt;td&gt;1700.859&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;1598918404&lt;/td&gt;
&lt;td&gt;0&lt;/td&gt;
&lt;td&gt;1774.402&lt;/td&gt;
&lt;td&gt;1106.427&lt;/td&gt;
&lt;td&gt;2041.954&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;1598918405&lt;/td&gt;
&lt;td&gt;0&lt;/td&gt;
&lt;td&gt;1774.402&lt;/td&gt;
&lt;td&gt;1106.427&lt;/td&gt;
&lt;td&gt;2041.954&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;&lt;/div&gt;

&lt;div class="table-wrapper-paragraph"&gt;&lt;table&gt;
&lt;thead&gt;
&lt;tr&gt;
&lt;th&gt;TIMESTAMP&lt;/th&gt;
&lt;th&gt;DS_Id&lt;/th&gt;
&lt;th&gt;POWER_A&lt;/th&gt;
&lt;th&gt;POWER_B&lt;/th&gt;
&lt;th&gt;POWER_C&lt;/th&gt;
&lt;/tr&gt;
&lt;/thead&gt;
&lt;tbody&gt;
&lt;tr&gt;
&lt;td&gt;1598918400&lt;/td&gt;
&lt;td&gt;1&lt;/td&gt;
&lt;td&gt;821.847&lt;/td&gt;
&lt;td&gt;574.748&lt;/td&gt;
&lt;td&gt;1203.807&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;1598918402&lt;/td&gt;
&lt;td&gt;1&lt;/td&gt;
&lt;td&gt;823.367&lt;/td&gt;
&lt;td&gt;574.315&lt;/td&gt;
&lt;td&gt;1203.795&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;1598918403&lt;/td&gt;
&lt;td&gt;1&lt;/td&gt;
&lt;td&gt;819.939&lt;/td&gt;
&lt;td&gt;574.261&lt;/td&gt;
&lt;td&gt;1203.647&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;1598918404&lt;/td&gt;
&lt;td&gt;1&lt;/td&gt;
&lt;td&gt;819.939&lt;/td&gt;
&lt;td&gt;574.261&lt;/td&gt;
&lt;td&gt;1203.647&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;&lt;/div&gt;

&lt;p&gt;Our requirement is to get the aggregated power for POWER_A, POWER_B, POWER_C fields. &lt;br&gt;
For example, for the timestamp 1598918400,&lt;/p&gt;

&lt;div class="table-wrapper-paragraph"&gt;&lt;table&gt;
&lt;thead&gt;
&lt;tr&gt;
&lt;th&gt;TIMESTAMP&lt;/th&gt;
&lt;th&gt;DS_Id&lt;/th&gt;
&lt;th&gt;POWER_A&lt;/th&gt;
&lt;th&gt;POWER_B&lt;/th&gt;
&lt;th&gt;POWER_C&lt;/th&gt;
&lt;/tr&gt;
&lt;/thead&gt;
&lt;tbody&gt;
&lt;tr&gt;
&lt;td&gt;1598918400&lt;/td&gt;
&lt;td&gt;0&lt;/td&gt;
&lt;td&gt;1403.421&lt;/td&gt;
&lt;td&gt;712.372&lt;/td&gt;
&lt;td&gt;1680.471&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;1598918400&lt;/td&gt;
&lt;td&gt;1&lt;/td&gt;
&lt;td&gt;821.847&lt;/td&gt;
&lt;td&gt;574.748&lt;/td&gt;
&lt;td&gt;1203.807&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;&lt;/div&gt;

&lt;p&gt;we have values for POWER_A as 1403.421W and 821.847W, sum as 2225.268W. Likewise, we have to calculate for all the time-series values(1598918402, 1598918403, 1598918404, …) for POWER_A, POWER_B and POWER_C also.&lt;/p&gt;

&lt;p&gt;This computation has to be done by Grafana. We are implementing this using the SQL syntax like queries as shown below:&lt;/p&gt;

&lt;p&gt;We are going to compute the aggregated power for the field POWER_A now.&lt;br&gt;
All the queries were constructed and executed successfully by &lt;strong&gt;Narendra Reddy Mallidi, SQL Developer&lt;/strong&gt;.&lt;br&gt;
We have our first query below:&lt;br&gt;
&lt;strong&gt;SELECT POWER_A as AA FROM "hdb2" WHERE "DS_ID" = '0'&lt;/strong&gt;&lt;br&gt;
Here, POWER_A is the variable which we are going to fetch from the table(table is called measurement in InfluxDB queries) named "hdb2".&lt;br&gt;
&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fus0oaukb5w7ocbnk3fvt.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fus0oaukb5w7ocbnk3fvt.png" alt="Screenshot from 2020-12-21 12-47-47picTen" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;The same thing is followed for other 4 blocks also(DS_ID =’1’, DS_ID=’2’, DS_ID=’3’, DS_ID=’4’)&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;SELECT POWER_A as BB FROM "hdb2" WHERE "DS_ID" = '1'&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;SELECT POWER_A as CC FROM "hdb2" WHERE "DS_ID" = '2'&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;SELECT POWER_A as DD FROM "hdb2" WHERE "DS_ID" = '3'&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;SELECT POWER_A as EE FROM "hdb2" WHERE "DS_ID" = '4'&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Now, we compute the aggregated power for POWER_A with the below query:&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;SELECT sum(ds1) + sum(ds0) + sum(ds2) + sum(ds3) + sum(ds4) AS Total&lt;br&gt;
FROM&lt;br&gt;
(&lt;br&gt;
SELECT POWER_A as ds0 FROM "hdb2" WHERE "DS_ID" = '0'&lt;br&gt;
),&lt;br&gt;
(&lt;br&gt;
SELECT POWER_A as ds1 FROM "hdb2" WHERE "DS_ID" = '1'&lt;br&gt;
),&lt;br&gt;
(&lt;br&gt;
SELECT POWER_A as ds2 FROM "hdb2" WHERE "DS_ID" = '2'&lt;br&gt;
),&lt;br&gt;
(&lt;br&gt;
SELECT POWER_A as ds3 FROM "hdb2" WHERE "DS_ID" = '3'&lt;br&gt;
),&lt;br&gt;
(&lt;br&gt;
SELECT POWER_A as ds4 FROM "hdb2" WHERE "DS_ID" = '4'&lt;br&gt;
) WHERE $timeFilter GROUP BY time(1s) fill(null)&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;Here, hdb2 is the table name in our INFLUXDB1.8 database from where we are fetching our data into the Grafana dashboard running at port 3000.&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Frbrobfbs4j9hew07pfbe.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Frbrobfbs4j9hew07pfbe.png" alt="Screenshot from 2020-12-21 12-22-51picOne" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;We get the graph as shown below:&lt;/p&gt;

&lt;div class="table-wrapper-paragraph"&gt;&lt;table&gt;
&lt;thead&gt;
&lt;tr&gt;
&lt;th&gt;TIMESTAMP&lt;/th&gt;
&lt;th&gt;SAMPLE&lt;/th&gt;
&lt;th&gt;POWER_A&lt;/th&gt;
&lt;/tr&gt;
&lt;/thead&gt;
&lt;tbody&gt;
&lt;tr&gt;
&lt;td&gt;2020-09-01 08:27:16&lt;/td&gt;
&lt;td&gt;Sample 1&lt;/td&gt;
&lt;td&gt;7.92k&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;&lt;/div&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F0jbetwlcivi0zqf1auck.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F0jbetwlcivi0zqf1auck.png" alt="Screenshot from 2020-12-21 13-07-48picTwelve" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;div class="table-wrapper-paragraph"&gt;&lt;table&gt;
&lt;thead&gt;
&lt;tr&gt;
&lt;th&gt;TIMESTAMP&lt;/th&gt;
&lt;th&gt;SAMPLE&lt;/th&gt;
&lt;th&gt;POWER_A&lt;/th&gt;
&lt;/tr&gt;
&lt;/thead&gt;
&lt;tbody&gt;
&lt;tr&gt;
&lt;td&gt;2020-09-01 08:17:08&lt;/td&gt;
&lt;td&gt;Sample 2&lt;/td&gt;
&lt;td&gt;5.77k&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;&lt;/div&gt;

&lt;p&gt;(Sample 2):&lt;br&gt;
TIMESTAMP: 2020-09-01 08:17:08&lt;br&gt;
Aggregate_Power: 5.77k&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fqcz5bn0b8v73bajupzb4.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fqcz5bn0b8v73bajupzb4.png" alt="Screenshot from 2020-12-21 12-23-59picThree" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;We are getting the correct aggregated values for POWER_A. We need to do the same for POWER_B and POWER_C.&lt;/p&gt;

&lt;p&gt;For POWER_B:&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;SELECT sum(ds1) + sum(ds0) + sum(ds2) + sum(ds3) + sum(ds4) AS PB&lt;br&gt;
FROM&lt;br&gt;
(&lt;br&gt;
SELECT POWER_B as ds1 FROM "hdb2" WHERE "DS_ID" = '1'&lt;br&gt;
),&lt;br&gt;
(&lt;br&gt;
SELECT POWER_B as ds0 FROM "hdb2" WHERE "DS_ID" = '0'&lt;br&gt;
),&lt;br&gt;
(&lt;br&gt;
SELECT POWER_B as ds2 FROM "hdb2" WHERE "DS_ID" = '2'&lt;br&gt;
),&lt;br&gt;
(&lt;br&gt;
SELECT POWER_B as ds3 FROM "hdb2" WHERE "DS_ID" = '3'&lt;br&gt;
),&lt;br&gt;
(&lt;br&gt;
SELECT POWER_B as ds4 FROM "hdb2" WHERE "DS_ID" = '4'&lt;br&gt;
) WHERE $timeFilter GROUP BY time(1s) fill(null)&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;For POWER_C:&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;SELECT sum(ds1) + sum(ds0) + sum(ds2) + sum(ds3) + sum(ds4) AS PC&lt;br&gt;
FROM&lt;br&gt;
(&lt;br&gt;
SELECT POWER_C as ds1 FROM "hdb2" WHERE "DS_ID" = '1'&lt;br&gt;
),&lt;br&gt;
(&lt;br&gt;
SELECT POWER_C as ds0 FROM "hdb2" WHERE "DS_ID" = '0'&lt;br&gt;
),&lt;br&gt;
(&lt;br&gt;
SELECT POWER_C as ds2 FROM "hdb2" WHERE "DS_ID" = '2'&lt;br&gt;
),&lt;br&gt;
(&lt;br&gt;
SELECT POWER_C as ds3 FROM "hdb2" WHERE "DS_ID" = '3'&lt;br&gt;
),&lt;br&gt;
(&lt;br&gt;
SELECT POWER_C as ds4 FROM "hdb2" WHERE "DS_ID" = '4'&lt;br&gt;
) WHERE $timeFilter GROUP BY time(1s) fill(null)&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;We will get the graph as shown below:&lt;/p&gt;

&lt;div class="table-wrapper-paragraph"&gt;&lt;table&gt;
&lt;thead&gt;
&lt;tr&gt;
&lt;th&gt;TIMESTAMP&lt;/th&gt;
&lt;th&gt;SAMPLE&lt;/th&gt;
&lt;th&gt;POWER_A&lt;/th&gt;
&lt;/tr&gt;
&lt;/thead&gt;
&lt;tbody&gt;
&lt;tr&gt;
&lt;td&gt;2020-09-01 08:27:12&lt;/td&gt;
&lt;td&gt;Sample 3&lt;/td&gt;
&lt;td&gt;7.89k&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;&lt;/div&gt;

&lt;p&gt;(Sample 3):&lt;br&gt;
TIMESTAMP: 2020-09-01 08:27:12&lt;br&gt;
Aggregate_Power: 7.89k &lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F7fxeit6uer2yl4fa2215.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F7fxeit6uer2yl4fa2215.png" alt="Screenshot from 2020-12-21 12-26-32PICFour" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;div class="table-wrapper-paragraph"&gt;&lt;table&gt;
&lt;thead&gt;
&lt;tr&gt;
&lt;th&gt;TIMESTAMP&lt;/th&gt;
&lt;th&gt;SAMPLE&lt;/th&gt;
&lt;th&gt;POWER_A&lt;/th&gt;
&lt;/tr&gt;
&lt;/thead&gt;
&lt;tbody&gt;
&lt;tr&gt;
&lt;td&gt;2020-09-01 08:17:08&lt;/td&gt;
&lt;td&gt;Sample 4&lt;/td&gt;
&lt;td&gt;5.77k&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;&lt;/div&gt;

&lt;p&gt;(Sample 4):&lt;br&gt;
TIMESTAMP: 2020-09-01 08:17:08&lt;br&gt;
Aggregate_Power: 5.77k&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fahnd4bqjvqclffx7t7gn.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fahnd4bqjvqclffx7t7gn.png" alt="Screenshot from 2020-12-21 12-27-41picFive" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;As we can see, these aggregated values for POWER_A(sample 3 and 4) confirm with the values shown in the previous graphs (sample 1 and 2) with the same timestamp.&lt;/p&gt;

&lt;p&gt;PIE CHART VISUALIZATION:&lt;br&gt;
Below is the query for visualization of 3 components: &lt;br&gt;
Lift_Energy, Public_Lighting_Energy, Booster_Pump_Energy.&lt;br&gt;
**&lt;br&gt;
SELECT &lt;br&gt;
((    (sum("LIFT_TOTAL"))  / (($__to - $__from) / 1000) ) )* (($__to - $__from) /  3600000)&lt;br&gt;
as Lift_Energy,&lt;br&gt;
(((sum("LIGHT_TOTAL"))  / (($__to - $__from) / 1000))) * ((($__to - $__from) / 1000) / 3600)&lt;br&gt;
as Public_Lighting_Energy,&lt;br&gt;
(((sum("PUMP_TOTAL"))  / (($__to - $__from) / 1000))) * ((($__to - $__from) / 1000) / 3600) as Booster_Pump_Energy&lt;br&gt;
FROM "hdb2" WHERE $timeFilter GROUP BY time(24h)&lt;br&gt;
**&lt;br&gt;
Here, we have combined 3 subqueries.&lt;/p&gt;

&lt;ol&gt;
&lt;li&gt; (sum("LIFT_TOTAL"))  / (($&lt;strong&gt;to - $&lt;/strong&gt;from) / 1000) ) )* (($&lt;strong&gt;to - $&lt;/strong&gt;from) /  3600000)
as Lift_Energy&lt;/li&gt;
&lt;li&gt;(sum("LIGHT_TOTAL"))  / (($&lt;strong&gt;to - $&lt;/strong&gt;from) / 1000))) * ((($&lt;strong&gt;to - $&lt;/strong&gt;from) / 1000) / 3600)
as Public_Lighting_Energy&lt;/li&gt;
&lt;li&gt;(sum("PUMP_TOTAL"))  / (($&lt;strong&gt;to - $&lt;/strong&gt;from) / 1000))) * ((($&lt;strong&gt;to - $&lt;/strong&gt;from) / 1000) / 3600) as Booster_Pump_Energy
All the 3 queries are similar. We will take 1st query.
The records are taken for each second,i.e, for every consecutive second we are recording the data. Here, (sum("LIFT_TOTAL")) is the sum computed over the period mentioned - (($&lt;strong&gt;to - $&lt;/strong&gt;from) in the time window.&lt;/li&gt;
&lt;/ol&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F7wuyhivwkbvjbpys2dpc.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F7wuyhivwkbvjbpys2dpc.png" alt="Screenshot from 2020-12-28 11-21-43timeRange" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;The query has been updated by Grafana as:&lt;br&gt;
&lt;strong&gt;SELECT &lt;br&gt;
((    (sum("LIFT_TOTAL"))  / ((1599190800000 - 1598898600000) / 1000) ) )* ((1599190800000 - 1598898600000) /  3600000)&lt;br&gt;
as Lift_Energy,&lt;br&gt;
(((sum("LIGHT_TOTAL"))  / ((1599190800000 - 1598898600000) / 1000))) * (((1599190800000 - 1598898600000) / 1000) / 3600)&lt;br&gt;
as Public_Lighting_Energy,&lt;br&gt;
(((sum("PUMP_TOTAL"))  / ((1599190800000 - 1598898600000) / 1000))) * (((1599190800000 - 1598898600000) / 1000) / 3600) as Booster_Pump_Energy&lt;br&gt;
FROM "hdb2" WHERE time &amp;gt;= 1598898600000ms and time &amp;lt;= 1599190800000ms GROUP BY time(24h)&lt;/strong&gt; &lt;/p&gt;

&lt;p&gt;Since the precision is in milliseconds, we are dividing it by 1000. Now, we get total power for the time range applied. The unit for energy consumption is watt-hour(Wh). Example, a 40-watt electric appliance operating continuously for 25 hours uses one kilowatt-hour. The value ((($__to - $__from) / 1000) / 3600) gives the total operating hours which multiplied with the total power,i.e,(sum("PUMP_TOTAL"))  / (($__to - $__from) / 1000))) gives power consumption in watt-hour units.&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fygv2gbait94g122rvyhl.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fygv2gbait94g122rvyhl.png" alt="Screenshot from 2020-12-28 11-14-34piechartOne" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;WORLD MAP VISUALIZATION: &lt;br&gt;
The sample csv table we are using for the world map is shown below.&lt;br&gt;
| Time                | DS_ID |                     graphStatement                        | latitude | longitude | value | totalPower |&lt;br&gt;
| ------------|:-----:| -----------------------------------: | -------: | --------: | ----: |:----------:|&lt;br&gt;
| 2020-12-27 16:02:27 | 4  | Total: 49836, lift: 18377, light: 3357, pump: 54699339, DS_ID: 4 | 1.3  | 104  | 1 | 49836 |&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fejscqywetwo72ztpt8b0.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fejscqywetwo72ztpt8b0.png" alt="Screenshot from 2020-12-28 11-41-58csvImage" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Above is the csv file which we are uploading in the influxdb into the "hdb7" measurement.&lt;br&gt;
Query for world map: &lt;br&gt;
&lt;strong&gt;SELECT * from "hdb7" GROUP BY * ORDER BY DESC LIMIT 4&lt;/strong&gt;&lt;br&gt;
We can see the query and the settings also in the image below. Here, we are using latitude and longitude values to plot the map and put the graphStatement field in the label of the map. &lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F7hejqjxmpv3ienc2w22h.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F7hejqjxmpv3ienc2w22h.png" alt="Screenshot from 2020-12-28 11-43-04worldMapOne" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fbdpn07mdn7ch3b2t07ui.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fbdpn07mdn7ch3b2t07ui.png" alt="Screenshot from 2020-12-28 11-46-02picMap" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;b. &lt;strong&gt;More on Installing Plugins Docker Grafana:&lt;/strong&gt;&lt;/p&gt;

&lt;ol&gt;
&lt;li&gt;Open the SSH terminal on your machine and run the following command:&lt;/li&gt;
&lt;/ol&gt;

&lt;p&gt;&lt;strong&gt;ssh your_username@host_ip_address&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;If the username on your local machine matches the one on the server you are trying to connect to, you can just type:&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;ssh host_ip_address&lt;/strong&gt;&lt;br&gt;
And hit Enter.&lt;br&gt;
After successful login, execute the below commands in the shell:&lt;br&gt;
&lt;strong&gt;sudo docker ps -a&lt;/strong&gt; &lt;br&gt;
&lt;strong&gt;sudo docker exec -it --user=root grafana /bin/sh&lt;/strong&gt; &lt;br&gt;
&lt;strong&gt;grafana-cli plugins install grafana-worldmap-panel&lt;/strong&gt;&lt;br&gt;
&lt;strong&gt;sudo docker container stop d1ead747ec87&lt;/strong&gt;&lt;br&gt;
&lt;strong&gt;sudo docker start d1ead747ec87&lt;/strong&gt;&lt;br&gt;
1.&lt;br&gt;
The &lt;strong&gt;sudo docker ps -a&lt;/strong&gt; lists all the running containers.&lt;br&gt;
2.&lt;br&gt;
We can execute/test the commands for the application running inside the container with &lt;strong&gt;sudo docker exec -it --user=root grafana /bin/sh&lt;/strong&gt; command. &lt;br&gt;
We can also ping to test the port connections with the commands:&lt;br&gt;
&lt;strong&gt;curl &lt;a href="http://localhost:3000" rel="noopener noreferrer"&gt;http://localhost:3000&lt;/a&gt;&lt;/strong&gt;&lt;br&gt;
&lt;strong&gt;curl &lt;a href="http://160.100.100.204:3000" rel="noopener noreferrer"&gt;http://160.100.100.204:3000&lt;/a&gt;&lt;/strong&gt; where the ip-address is of the remote virtual machine provisioned in the cloud which we have logged into.&lt;br&gt;
3.&lt;br&gt;
The plugins are installed with &lt;strong&gt;grafana-cli plugins install grafana-worldmap-panel&lt;/strong&gt;. &lt;br&gt;
4.&lt;br&gt;
The docker containers are restarted with commands:&lt;br&gt;
 &lt;strong&gt;sudo docker container stop d1ead747ec87&lt;/strong&gt;&lt;br&gt;
&lt;strong&gt;sudo docker start d1ead747ec87&lt;/strong&gt;&lt;/p&gt;


&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    
Last login: Fri Dec 18 19:37:21 2020 from 49.206.11.161
root@d4eViz:~# sudo docker ps -a
CONTAINER ID        IMAGE               COMMAND             CREATED             STATUS              PORTS                    NAMES
d1ead747ec87        grafana/grafana     "/run.sh"           46 hours ago        Up 17 hours         0.0.0.0:3000-&amp;gt;3000/tcp   grafana
root@d4eViz:~# sudo docker exec -it --user=root grafana /bin/sh
/usr/share/grafana # grafana-cli plugins install grafana-worldmap-panel
installing grafana-worldmap-panel @ 0.3.2
from: https://grafana.com/api/plugins/grafana-worldmap-panel/versions/0.3.2/download
into: /var/lib/grafana/plugins

✔ Installed grafana-worldmap-panel successfully 

Restart grafana after installing plugins . 

/usr/share/grafana # sudo docker container stop d1ead747ec87
/bin/sh: sudo: not found
/usr/share/grafana # root@d4eViz:~# 
root@d4eViz:~# sudo docker container stop d1ead747ec87
d1ead747ec87
root@d4eViz:~# sudo docker start d1ead747ec87
d1ead747ec87
root@d4eViz:~# 

  &lt;/code&gt;
&lt;/div&gt;



&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    
root@d4eViz:~# sudo docker rm -fv $(sudo docker ps -aq)
08d6b4e38932
root@d4eViz:~# sudo docker run -d -p 3000:3000 --name=grafana -v grafana-storage:/var/lib/grafana grafana/grafana
d1ead747ec87a566c5f8de5c36a705d3b8e1860f7e7dc78b2ea5bf2ef0f574d8
root@d4eViz:~# sudo docker ps -a
CONTAINER ID        IMAGE               COMMAND             CREATED             STATUS              PORTS                    NAMES
d1ead747ec87        grafana/grafana     "/run.sh"           4 seconds ago       Up 4 seconds        0.0.0.0:3000-&amp;gt;3000/tcp   grafana
root@d4eViz:~# sudo docker exec -it grafana /bin/sh
/usr/share/grafana $ curl http://localhost:3000
/bin/sh: curl: not found
/usr/share/grafana $ apk add curl
ERROR: Unable to lock database: Permission denied
ERROR: Failed to open apk database: Permission denied
/usr/share/grafana $ root@d4eViz:~# 
root@d4eViz:~# sudo docker exec -it --user=root grafana /bin/sh
/usr/share/grafana # apk add curl
fetch http://dl-cdn.alpinelinux.org/alpine/v3.12/main/x86_64/APKINDEX.tar.gz
fetch http://dl-cdn.alpinelinux.org/alpine/v3.12/community/x86_64/APKINDEX.tar.gz
(1/3) Installing nghttp2-libs (1.41.0-r0)
(2/3) Installing libcurl (7.69.1-r3)
(3/3) Installing curl (7.69.1-r3)
Executing busybox-1.31.1-r19.trigger
Executing glibc-bin-2.30-r0.trigger
/usr/glibc-compat/sbin/ldconfig: /usr/glibc-compat/lib/ld-linux-x86-64.so.2 is not a symbolic link

OK: 27 MiB in 37 packages
/usr/share/grafana # curl http://localhost:3000
Found.

/usr/share/grafana # curl http://160.100.100.204:3000
Found.

/usr/share/grafana # 

  &lt;/code&gt;
&lt;/div&gt;
&lt;br&gt;
c. &lt;strong&gt;Writing to the InfluxDB2.0&lt;/strong&gt;&lt;br&gt;
Reference:

&lt;ol&gt;
&lt;li&gt;&lt;a href="https://john.soban.ski/refactor-python-to-influx-2.html" rel="noopener noreferrer"&gt;https://john.soban.ski/refactor-python-to-influx-2.html&lt;/a&gt;&lt;/li&gt;
&lt;/ol&gt;

&lt;p&gt;Requirements:&lt;/p&gt;

&lt;ol&gt;
&lt;li&gt;
&lt;p&gt;Get started with InfluxDB 2.0&lt;/p&gt;

&lt;p&gt;The InfluxDB 2.0 time series platform is purpose-built to collect, store, process and visualize metrics and events. Get started with InfluxDB OSS v2.0 by downloading InfluxDB, installing the necessary executables, and running the initial setup process.&lt;/p&gt;

&lt;p&gt;If not installed, follow the link&lt;br&gt;
&lt;a href="https://docs.influxdata.com/influxdb/v2.0/get-started/" rel="noopener noreferrer"&gt;https://docs.influxdata.com/influxdb/v2.0/get-started/&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Start InfluxDB by running the influxd daemon:&lt;/p&gt;

&lt;p&gt;k@k-Lenovo-G50-70:~$ influxd&lt;/p&gt;
&lt;/li&gt;
&lt;/ol&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Ffpx6e9h9dofp1tamvpun.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Ffpx6e9h9dofp1tamvpun.png" alt="picThirteen" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;ol&gt;
&lt;li&gt;
&lt;p&gt;Python installed&lt;/p&gt;

&lt;p&gt;k@k-Lenovo-G50-70:~$ python --version&lt;br&gt;
Python 2.7.12&lt;br&gt;
k@k-Lenovo-G50-70:~$ python3 --version&lt;br&gt;
Python 3.5.2&lt;/p&gt;
&lt;/li&gt;
&lt;/ol&gt;

&lt;p&gt;Steps:&lt;/p&gt;

&lt;ol&gt;
&lt;li&gt;
&lt;p&gt;Replace the values of INFLUX_TOKEN, ORG, BUCKET_NAME and measurement_name with the name of the table you need to create.&lt;/p&gt;

&lt;p&gt;Also replace the csv path you need to upload the csv file at line:&lt;br&gt;
with open('/home/k/Downloads/influxData/data_0_20200901.csv') as csv_file:&lt;br&gt;
In the csv file, we have time stored in Unix Timestamp format.&lt;/p&gt;
&lt;/li&gt;
&lt;li&gt;&lt;p&gt;Run the below program:&lt;/p&gt;&lt;/li&gt;
&lt;/ol&gt;

&lt;p&gt;k@k-Lenovo-G50-70:~/a_b$ python3 pushToInflux2_0.py&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fusgja6r7yjofgg0splzs.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fusgja6r7yjofgg0splzs.png" alt="picTFourteen" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;/home/k/a_b/pushToInflux2_0.py&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Code:&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    
'''
!/usr/bin/python
'''
import requests
import uuid
import random
import time
import sys
import csv
import json

INFLUX_TOKEN='qCAYOyvOErIP_KaJssk_neFar-o7PdvHL64eWYCD_ofywR_J3iubktdB58A3TE-6sM7C61Gt8qOUPvc4t0WVBg=='
ORG="asz"
INFLUX_CLOUD_URL='localhost'
BUCKET_NAME='b'
'''
Be sure to set precision to ms, not s
'''
QUERY_URI='http://{}:8086/api/v2/write?org={}&amp;amp;bucket={}&amp;amp;precision=ms'.format(INFLUX_CLOUD_URL,ORG,BUCKET_NAME)
headers = {}
headers['Authorization'] = 'Token {}'.format(INFLUX_TOKEN)
measurement_name = 'data_0_20200901'
'''
Increase the points, 2, 10 etc.
'''
number_of_points = 1000
batch_size = 1000

with open('/home/k/Downloads/influxData/data_0_20200901.csv') as csv_file:
    csv_reader = csv.reader(csv_file, delimiter=',')
    print('Processed')
for row in csv_reader:
    _row = 0
    if row[0] == "TIMESTAMP":
        pass
    else:
        _row = int((int(row[0])) * 1000)
    print(_data_end_time, row[0],_row, '\n')
    data.append("{measurement},location={location} POWER_A={POWER_A},POWER_B={POWER_B},POWER_C={POWER_C} {timestamp}"
        .format(measurement=measurement_name, location="reservoir", POWER_A=row[2], POWER_B=row[3], POWER_C=row[4], timestamp=_row))

count = 0
if __name__ == '__main__':
  # Check to see if number of points factors into batch size
  count = 0
  if (  number_of_points % batch_size != 0 ):
    raise SystemExit( 'Number of points must be divisible by batch size' )
  # Newline delimit the data
  for batch in range(0, len(data), batch_size):
    time.sleep(10)
    current_batch = '\n'.join( data[batch:batch + batch_size] )
    print(current_batch)
    r = requests.post(QUERY_URI, data=current_batch, headers=headers)
    count = count + 1
    print(r.status_code, count, data[count])


  &lt;/code&gt;
&lt;/div&gt;


&lt;ol&gt;
&lt;li&gt;In the InfluxDB2.0 screen page at:
&lt;a href="http://localhost:8086/orgs/772821f18f2f7a0d/data-explorer?bucket=b" rel="noopener noreferrer"&gt;http://localhost:8086/orgs/772821f18f2f7a0d/data-explorer?bucket=b&lt;/a&gt;
Under the Explore option&lt;/li&gt;
&lt;/ol&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fj90g849m1drf0iuyak0j.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fj90g849m1drf0iuyak0j.png" alt="picFifteen" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;We need to set the WINDOW PERIOD as 15s / 1m to see more points on the dashboard.&lt;br&gt;
Also, the timing window must be adjusted according to the timestamp of the record.&lt;br&gt;
For e.g,&lt;/p&gt;

&lt;div class="table-wrapper-paragraph"&gt;&lt;table&gt;
&lt;thead&gt;
&lt;tr&gt;
&lt;th&gt;TIMESTAMP&lt;/th&gt;
&lt;th&gt;DS_Id&lt;/th&gt;
&lt;th&gt;POWER_A&lt;/th&gt;
&lt;th&gt;POWER_B&lt;/th&gt;
&lt;th&gt;POWER_C&lt;/th&gt;
&lt;/tr&gt;
&lt;/thead&gt;
&lt;tbody&gt;
&lt;tr&gt;
&lt;td&gt;1598918400&lt;/td&gt;
&lt;td&gt;0&lt;/td&gt;
&lt;td&gt;1403.421&lt;/td&gt;
&lt;td&gt;712.372&lt;/td&gt;
&lt;td&gt;1680.471&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;&lt;/div&gt;

&lt;p&gt;So, we need to apply the time range from the above date for the results to show as in the above window.&lt;/p&gt;

&lt;p&gt;My Github Profile for code:&lt;br&gt;
Please see the master branch in my repo.&lt;br&gt;
Link: &lt;a href="https://github.com/krishnakurtakoti/python-influxdb-2.0-write" rel="noopener noreferrer"&gt;https://github.com/krishnakurtakoti/python-influxdb-2.0-write&lt;/a&gt;&lt;/p&gt;

</description>
    </item>
    <item>
      <title>Azure Devops Series - 2</title>
      <dc:creator>Krishna Kurtakoti</dc:creator>
      <pubDate>Sat, 13 Feb 2021 06:13:24 +0000</pubDate>
      <link>https://dev.to/krishnakurtakoti/azure-devops-series-2-1c1l</link>
      <guid>https://dev.to/krishnakurtakoti/azure-devops-series-2-1c1l</guid>
      <description>&lt;p&gt;&lt;strong&gt;Creating Release Pipelines&lt;/strong&gt;&lt;br&gt;
Pre-requisites:&lt;/p&gt;

&lt;ol&gt;
&lt;li&gt;We need to have a service connection ready to bind the Azure web app with the DevOps pipelines. So, we proceed by creating a new service connection as shown below:&lt;/li&gt;
&lt;/ol&gt;

&lt;p&gt;In the project Settings, search for Service connections.&lt;br&gt;
&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fi4ucezpu2w9ensgirixk.jpg" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fi4ucezpu2w9ensgirixk.jpg" alt="az106a" width="800" height="439"&gt;&lt;/a&gt;&lt;br&gt;
a. Click on Service connections as shown in the figure below. Select Azure Resource Manager as connection type.&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fkl7r0pv2g6urqw807bhu.jpg" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fkl7r0pv2g6urqw807bhu.jpg" alt="az107a" width="800" height="440"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;b.  For the selected Azure Resource Manager, select Publish Profile as the Authentication method.&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F88hfgpaodg58j9gcyp3p.jpg" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F88hfgpaodg58j9gcyp3p.jpg" alt="az108a" width="800" height="432"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;c.  The final step of creating a new Azure service connections is completed by filling in the Subscription, Resource group(where the web app is deployed), the web app name, Service connection name.&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fvg94coctvs5j5g79vu39.jpg" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fvg94coctvs5j5g79vu39.jpg" alt="az109" width="800" height="437"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Now, the service connection is created.&lt;br&gt;
&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fbsvmz27f707ws9yc3d5r.jpg" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fbsvmz27f707ws9yc3d5r.jpg" alt="az110a" width="800" height="438"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Create a new release pipeline&lt;/p&gt;

&lt;p&gt;a.  Create a new pipeline as shown in the figure below by clicking ‘new release pipeline’. &lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fwmgi7x138b09l5lhbbc1.jpg" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fwmgi7x138b09l5lhbbc1.jpg" alt="az103a" width="800" height="376"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;b.  Create a new task by searching for ‘Azure Web App’ under the Deploy section.&lt;br&gt;
&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fwjbnyogxt91mp2la9jn9.jpg" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fwjbnyogxt91mp2la9jn9.jpg" alt="az111a" width="800" height="434"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;c. Click on Azure Web App as shown in the figure below.&lt;br&gt;
&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F02uew83hmycgevznn8kl.jpg" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F02uew83hmycgevznn8kl.jpg" alt="az112a" width="800" height="436"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;d. Fill in the Display name, Azure subscription,App type, App name&lt;br&gt;
&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F41rgqtk5pfv2qkvbksuq.jpg" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F41rgqtk5pfv2qkvbksuq.jpg" alt="az113a" width="800" height="434"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Flo7x83p1nvd58b31bp8o.jpg" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Flo7x83p1nvd58b31bp8o.jpg" alt="az114a" width="800" height="439"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;e. We also need to add an artifact produced earlier from the build pipeline as shown below.&lt;br&gt;
&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F56aylbs9kwh70kg50jp3.jpg" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F56aylbs9kwh70kg50jp3.jpg" alt="az115a" width="800" height="433"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;f. Once the artifact is uploaded, we need to check the continuous integration checkbox for us to run the pipeline whenever we have a new build pipeline running. It is shown below.&lt;br&gt;
Click on add to specify the branch to build. Here, we have ‘Development’ as the branch.&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fklmdvf6y80jbs4yl36fc.jpg" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fklmdvf6y80jbs4yl36fc.jpg" alt="az116a" width="800" height="439"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;g. Final step is to click on Create release button to trigger the continuous deployment of the release pipeline.&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Faeafi1l6m2i4e112lb64.jpg" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Faeafi1l6m2i4e112lb64.jpg" alt="az117a" width="800" height="434"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fywj0zevnov7wrsb6ugcj.jpg" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fywj0zevnov7wrsb6ugcj.jpg" alt="az118a" width="800" height="435"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;We can see the release pipeline running as shown below.&lt;/p&gt;

&lt;p&gt;&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fy41wvdxcayp76unjfap4.jpg" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fy41wvdxcayp76unjfap4.jpg" alt="az119a" width="800" height="437"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;This completes the flow for building the release pipeline by creating a new service connection in Azure DevOps in our project.&lt;/p&gt;

&lt;p&gt;I would like to mention all who helped me in development of the project.&lt;/p&gt;

&lt;ol&gt;
&lt;li&gt;&lt;p&gt;Althaf H(CTO) in building the project flow and reviewing it.&lt;/p&gt;&lt;/li&gt;
&lt;li&gt;&lt;p&gt;Swetha SR (.Net Solution Architect) for developing the build pipelines (translating from manual build cycle to automated build pipelines).&lt;/p&gt;&lt;/li&gt;
&lt;li&gt;&lt;p&gt;Narendra Reddy(.Net developer) in debugging the project.&lt;/p&gt;&lt;/li&gt;
&lt;/ol&gt;

</description>
      <category>azure</category>
      <category>devops</category>
      <category>appdeployment</category>
      <category>ci</category>
    </item>
    <item>
      <title>Azure Devops Series - 1</title>
      <dc:creator>Krishna Kurtakoti</dc:creator>
      <pubDate>Sat, 13 Feb 2021 06:13:16 +0000</pubDate>
      <link>https://dev.to/krishnakurtakoti/azure-devops-series-1-1kmo</link>
      <guid>https://dev.to/krishnakurtakoti/azure-devops-series-1-1kmo</guid>
      <description>&lt;p&gt;Steps for import project to Azure Repos from Git:&lt;/p&gt;

&lt;ol&gt;
&lt;li&gt;&lt;p&gt;Create a new project.&lt;br&gt;
&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Ft7axtuaw68zln3o6esev.jpg" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Ft7axtuaw68zln3o6esev.jpg" alt="1" width="800" height="433"&gt;&lt;/a&gt;&lt;/p&gt;&lt;/li&gt;
&lt;li&gt;&lt;p&gt;After getting the ‘Welcome to the project!’ screen, click on the repos.&lt;br&gt;
&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fty1k2g4l9ms15sercf0r.jpg" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fty1k2g4l9ms15sercf0r.jpg" alt="2" width="800" height="435"&gt;&lt;/a&gt;&lt;/p&gt;&lt;/li&gt;
&lt;li&gt;&lt;p&gt;Under the Import a repository section, click on the import button.&lt;br&gt;
&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fw1zyefr4hhlk69k3w9rc.jpg" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fw1zyefr4hhlk69k3w9rc.jpg" alt="3" width="800" height="435"&gt;&lt;/a&gt;&lt;/p&gt;&lt;/li&gt;
&lt;li&gt;&lt;p&gt;Under the Import a Git repository screen as shown below, enter your git link, credentials as shown below:&lt;br&gt;
&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fx9chrji6mipd5xjciyxb.jpg" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fx9chrji6mipd5xjciyxb.jpg" alt="4" width="800" height="434"&gt;&lt;/a&gt;&lt;/p&gt;&lt;/li&gt;
&lt;li&gt;&lt;p&gt;After clicking import button, we get the screen as shown below:&lt;br&gt;
Repository type: Git&lt;/p&gt;&lt;/li&gt;
&lt;/ol&gt;

&lt;p&gt;Clone URL *: &lt;a href="https://repo-url.git.beanstackapp.com/project-workshift.git" rel="noopener noreferrer"&gt;https://repo-url.git.beanstackapp.com/project-workshift.git&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Username: &lt;/p&gt;

&lt;p&gt;Password / PAT *: &lt;br&gt;
&lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fodyr7bhyhwkw8intdu58.jpg" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fodyr7bhyhwkw8intdu58.jpg" alt="5" width="800" height="437"&gt;&lt;/a&gt;&lt;/p&gt;

&lt;ol&gt;
&lt;li&gt;After successful import, we get all the code into the repos folder along with all the commits and the branches as shown below:
&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Fy2z8xaad3g2phkgu4mcd.jpg" alt="6" width="800" height="437"&gt;
&lt;/li&gt;
&lt;/ol&gt;

&lt;p&gt;This completes successful import of the Git repository into the Azure Repos. This repository in Azure Repos can be used for Continuous Integration and Continuous Delivery by creating Build pipelines and Release pipelines as shown in the next post of the series.&lt;/p&gt;

</description>
      <category>azure</category>
      <category>devops</category>
      <category>appdeployment</category>
      <category>ci</category>
    </item>
    <item>
      <title>Otp generation and verification using speakeasy, Nest.js and MongoDB</title>
      <dc:creator>Krishna Kurtakoti</dc:creator>
      <pubDate>Wed, 11 Nov 2020 07:09:41 +0000</pubDate>
      <link>https://dev.to/krishnakurtakoti/otp-genertion-and-verification-using-speakeasy-nest-js-and-mongodb-4nam</link>
      <guid>https://dev.to/krishnakurtakoti/otp-genertion-and-verification-using-speakeasy-nest-js-and-mongodb-4nam</guid>
      <description>&lt;p&gt;Here, we have designed and developed a flow for OTP(One time password) for user registration and also blocking a user's account after the maximum retries for incorrect otp is exceeded.&lt;br&gt;
We will go in steps for generation, verification and blocking a user's account.&lt;/p&gt;

&lt;p&gt;Step 1:&lt;br&gt;
OTP Generation:&lt;br&gt;
&lt;strong&gt;Code (Part-I):&lt;/strong&gt;&lt;br&gt;
&lt;em&gt;src/auth/auth.controller.ts&lt;/em&gt;&lt;br&gt;
&lt;/p&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    
import {
  Controller,
  Post,
  Req,
  UseGuards,
  Get,
  Body,
  BadRequestException,
  Param,
  NotFoundException,
} from "@nestjs/common";
import { JwtAuthGuard } from "./auth.guard";
import { LoggedInToken } from "../users/objects/login-user.dto";
import { AuthService } from "./auth.service";
import * as speakeasy from "speakeasy";
import { optSecret } from "../common/constants/config";
import {
  UNKNOWN_PARAM,
  EMAIL_NOT_FOUND,
  OTP_ERROR,
  EXISTS,
  OTP_NOT_EXPIRED,
  NEW_PASSWORD_AND_CONFIRM_NEW_PASSWORD_ERROR,
  OTP_TIME_OUT,
  TOKEN_ALREADY_USED,
  EMAIL_ERROR,
  BLOCKED_ACCOUNT_ERROR,
} from "../common/constants/string";
import { plainToClass } from "class-transformer";
import { success } from "../common/base/httpResponse.interface";
import { UserDto } from "../users/objects/create-user.dto";
import { OtpEmail, UserCycloanAccountBlockedEmail } from "../users/objects/user.registered.email";
import {
  ForgetPasswordOtpEmail,
  PasswordChangedAlert,
} from "../users/objects/user.registered.email";
import { EmailService } from "../email/email.service";
import { OtpService } from "./otp/otp.service";
import { RequestUser } from "../common/utils/controller.decorator";
import { UsersService } from "../users/users.service";
import { EmailDto } from "../email/objects/email.dto";
import { OtpDto } from "./otp/otp.dto";
import { InjectModel } from "@nestjs/mongoose";
import { IOtp, Otp } from "./otp/otp.schema";
import { Model } from "mongoose";
import { ForgotPasswordOtpService } from "./forgot-password-otp/forgot-password-otp.service";
import { ForgotPasswordOtp } from "./forgot-password-otp/forgot-password-otp.schema";
import { ForgotPasswordOtpDto } from "./forgot-password-otp/forgot-password-otp.dto";
import { OtpIncorrectService } from "./otpIncorrect/otpIncorrect.service";
import { OtpIncorrect } from "./otpIncorrect/otpIncorrect.schema";
import { BlockedAccountService } from "./blockedAccounts/blockedAccounts.service";
import { IBlockedAccount } from "./blockedAccounts/blockedAccounts.schema";
import { OTP_RETRY_LIMIT, Status, ROLES_ACCESS_ACTION, BLOCKED_ACCOUNT_TYPE } from "../common/constants/enum";
import { RolesService } from "../roles/roles.service";
import { OtpIncorrectForgotPasswordService } from "./otpIncorrectForgotPassword/otpIncorrectForgotPassword.service";
import { OtpIncorrectForgotPassword } from "./otpIncorrectForgotPassword/otpIncorrectForgotPassword.schema";

//@UseGuards(JwtAuthGuard)
@Controller("auth/refresh")
export class AuthController {
  constructor(
    private authService: AuthService,
    private emailService: EmailService,
    private usersService: UsersService,
    private otpService: OtpService,
    private forgotPasswordOtpService: ForgotPasswordOtpService,
    @InjectModel("Otp") private readonly otpModel: Model,
    @InjectModel("ForgotPasswordOtp")
    private readonly forgotPasswordotpModel: Model,
    private readonly otpIncorrectService: OtpIncorrectService,
    @InjectModel("OtpIncorrect") private readonly otpIncorrectModel: Model,
    private readonly blockedAccountService: BlockedAccountService,
    @InjectModel("BlockedAccount") private readonly blockedAccountModel: Model,
    private rolesservice: RolesService,
    private otpIncorrectForgotPasswordService: OtpIncorrectForgotPasswordService,
    @InjectModel("OtpIncorrectForgotPassword") private readonly otpIncorrectForgotPasswordModel: Model,
  ) {}

@UseGuards(JwtAuthGuard)
  @Post()
  public async refresh(@Req() req): Promise {
    return this.authService.createJwtPayLoad(req.user);
  }

//Api For generating a secret and storing it in config.ts
  @Get("secret")
  async getSecret() {
    const secret = speakeasy.generateSecret({ length: 20 });
    return secret;
  }
  //Api For generating a 6 digit token using the secret

@Post("generate")
  async getOtp(
    @Req() req,
    @Body() body: { email: string; firstName: string; lastName: string }
    //@RequestUser() user
  ) {
    debugger;
    let email = body.email;
    let firstName = body.firstName;
    let lastName = body.lastName;
    var token = speakeasy.totp({
      secret: optSecret,
      encoding: "base32",
    });
let userToAttempt: any = await this.usersService.findOneByEmail(body.email);

//Check for existing users
if (!userToAttempt) {

 let _blocked: any = await this.blockedAccountService.findOneByQuery({email: email, type: BLOCKED_ACCOUNT_TYPE.USER_REGISTRATION})

 if(_blocked !== null){
    throw new BadRequestException(BLOCKED_ACCOUNT_ERROR(email))
  }

  let query = { email: email };

  let _otp: any = await this.otpService.findOneByQuery(query);
  let currentTime: number = Date.now();
  if (_otp) {
    let k: any = await this.otpModel
      .find({ email: email })
      .sort({ updatedTime: -1 })
      .limit(1);
    if (k !== undefined) {
      let diff = (currentTime - k[0].expiry) / 1000;

      let updateTime: number = Date.now();
      let createDto: any = {
        token: token,
        email: email,
        firstName: firstName,
        lastName: lastName,
        expiry: updateTime + 15 * 60 * 1000,
      };
      if (diff &amp;gt; 0) {
        let _otp: any = await this.otpService.create(createDto);
        let _data =
          "Otp sent to registered email " +
          body.email +
          " " +
          "token:" +
          token;
        await this.emailService.sendEmail(
          new OtpEmail(
            new EmailDto({
              to: body.email,
              metaData: { email, token, firstName, lastName },
            })
          )
        );
        return success(_data);
      } else {
        let errorData = "Otp sent yet to expire in" + diff + "seconds";
        throw new BadRequestException(OTP_NOT_EXPIRED(errorData));
      }
    }
  }
  //For users requesting for the first time
  let updateTime: number = Date.now();
  let createDto: any = {
    token: token,
    email: email,
    expiry: updateTime + 15 * 60 * 1000,
  };
  let _otp1: any = await this.otpService.create(createDto);
  await this.emailService.sendEmail(
    new OtpEmail(
      new EmailDto({
        to: body.email,
        metaData: { email, token, firstName, lastName },
      })
    )
  );
  let _data1 =
    "Otp sent to registered email " + body.email + " " + "token:" + token;
  return success(_data1);
}
throw new BadRequestException(EXISTS, "User exists");

}
}

  &lt;/code&gt;
&lt;/div&gt;


&lt;p&gt;In the first method, below,&lt;br&gt;
&lt;/p&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    
@Get("secret")
  async getSecret() {
    const secret = speakeasy.generateSecret({ length: 20 });
    return secret;
  }

  &lt;/code&gt;
&lt;/div&gt;
&lt;br&gt;
Here, we create a secret and store it in the config.ts file (not recommended).

&lt;p&gt;&lt;em&gt;src/common/constants/config.ts&lt;/em&gt;&lt;br&gt;
&lt;/p&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    
import * as dotenv from "dotenv";
dotenv.config();
  export const optSecret = "HJCCU6Z7NNAS4UCHMJFHOI3YN47UYS2C";

  &lt;/code&gt;
&lt;/div&gt;
&lt;br&gt;
After storing the secret, the OTP is generated by calling the POST &lt;a href="http://localhost:3000/api/v1/auth/refresh/generate" rel="noopener noreferrer"&gt;http://localhost:3000/api/v1/auth/refresh/generate&lt;/a&gt; Api&lt;br&gt;
by sending the email for which OTP needs to be sent in the body of the request as shown below.&lt;br&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    
{
    "email": "az@gmail.com"
}

  &lt;/code&gt;
&lt;/div&gt;
&lt;br&gt;
&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2F4x8zpjloa372sg2eo1b3.png" alt="Screenshot from 2020-11-11 12-28-18cycloanHundredThirtySeven" width="800" height="449"&gt;&lt;br&gt;
 This is how the OTP generation flow follows:

&lt;p&gt;We are first verifying if OTP is already generated and is not expired using the line below:&lt;br&gt;
&lt;/p&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    
this.otpService.findOneByQuery(query);

  &lt;/code&gt;
&lt;/div&gt;
&lt;br&gt;
If no Otp record exists for the user with the given email, we infer that the user is a new user requesting an Otp for the first time. We directly create an Otp record in the database, generate the Otp token, and send it to the requesting user's email account.&lt;br&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    
 var token = speakeasy.totp({
      secret: optSecret,
      encoding: "base32",
    });

let createDto: any = {
        token: token,
        email: email,
        expiry: updateTime + 15 * 60 * 1000,
      };
     let _otp1: any = await this.otpService.create(createDto);
      await this.emailService.sendEmail(
        new OtpEmail(
          new EmailDto({
            to: body.email,
            metaData: { email, token, firstName, lastName },
          })
        )
      );   


  &lt;/code&gt;
&lt;/div&gt;
&lt;br&gt;
  If an Otp record already exists for the user's email, we will find the latest Otp record with the user's email and add a condition to check if the Otp is yet to expire. If the Otp has not expired at the time of sending an Otp generation request again, then we will show an alert as "Otp sent yet to expire in" + diff + "seconds".&lt;br&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    
if (diff &amp;gt; 0) {
            let _otp: any = await this.otpService.create(createDto);
            let _data =
              "Otp sent to registered email " +
              body.email +
              " " +
              "token:" +
              token;
            await this.emailService.sendEmail(
              new OtpEmail(
                new EmailDto({
                  to: body.email,
                  metaData: { email, token, firstName, lastName },
                })
              )
            );
            return success(_data);
        }
       else {
       let errorData = "Otp sent yet to expire in" + diff + "seconds";
            throw new BadRequestException(OTP_NOT_EXPIRED(errorData));
       }


  &lt;/code&gt;
&lt;/div&gt;
&lt;br&gt;
Step 2:&lt;br&gt;
OTP Verification:

&lt;p&gt;The Otp token and the email are sent as json in the body of the request for Otp verification in the api&lt;br&gt;
POST &lt;a href="http://localhost:3000/api/v1/auth/refresh/otp/email" rel="noopener noreferrer"&gt;http://localhost:3000/api/v1/auth/refresh/otp/email&lt;/a&gt;&lt;br&gt;
&lt;/p&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    
{
    "email": "az@gmail.com",
    "otp": "124583"
}

  &lt;/code&gt;
&lt;/div&gt;
&lt;br&gt;
&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Ft8rjivwpn0603aeols2h.png" alt="Screenshot from 2020-11-11 12-45-07cycloanHundredThirtyNine" width="800" height="449"&gt;

&lt;p&gt;We will verify that the email sent does not already exist in our user's database. We will then validate the token. If the token is verified, we update the Otp record with the verified field set to true&lt;br&gt;
and return the success data.&lt;br&gt;
&lt;/p&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    
var tokenValidates = speakeasy.totp.verify({
            secret: optSecret,
            encoding: "base32",
            token: otp,
            window: 30,
          });
          if (tokenValidates) {
            update = {
              isVerified: true,
            };
      } else {
          ...

       }

let updated = await this.otpService.edit(_otp.id, update, updateTime);
      const _data = plainToClass(OtpDto, updated, {
        excludeExtraneousValues: true,
      });
      return success(_data);

  &lt;/code&gt;
&lt;/div&gt;


&lt;p&gt;If the Otp is incorrect, we create an OtpIncorrect record and then count the number of OtpIncorrect records bearing the user's email; we then check whether the&lt;br&gt;
count is greater than the maximum retry limit.&lt;br&gt;
If the condition is true, we will block the user by creating a record in the blocked list and return a "user in the blocked list" error; otherwise, we will return an "Otp error".&lt;br&gt;
&lt;/p&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    
if(otpErrorCount &amp;gt; OTP_RETRY_LIMIT.MAXIMUM_OTP_RETRY_LIMIT){

let _blocked: any = await this.blockedAccountService.findOneByQuery({email: email, type: BLOCKED_ACCOUNT_TYPE.USER_REGISTRATION})
              if(_blocked == null){
                let _blocked: any = await this.blockedAccountService.create(createBlockedAccountDto);
                //console.log('Your account is added to blocked list. BLOCKED LIST BLOCKED LIST BLOCKED LIST', _blocked);
            await this.emailService.sendEmail(
              new UserCycloanAccountBlockedEmail(
                new EmailDto({
                  to: body.email,
                  metaData: { email, //firstName, lastName 
                  },
                })
              )
            );
            console.log('Blocked Account email sent.................');
          }
          console.log('Your account is added to blocked list. BLOCKED LIST BLOCKED LIST BLOCKED LIST', _blocked);
          throw new BadRequestException(BLOCKED_ACCOUNT_ERROR(email))
        }
        throw new BadRequestException(OTP_ERROR);
    }


  &lt;/code&gt;
&lt;/div&gt;


&lt;p&gt;The entire code for email verification is given below:&lt;/p&gt;


&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    
//Api for verifying a 6 digit token using the secret
  @Post("otp/:emailOrMobile")
  async verifyOTP(
    @Param("emailOrMobile") emailOrMobile,
    @Body() body: { otp: string; email: string }
  ) {
    debugger;
    let otp = body.otp;
    let email = body.email;
    let updateTime: number = Date.now();
    let update = {};
    let _blocked: any = await this.blockedAccountService.findOneByQuery({email: email, type: BLOCKED_ACCOUNT_TYPE.USER_REGISTRATION})
    console.log('_blocked','_blocked .................._blocked',_blocked); 
    if(_blocked !== null){
      throw new BadRequestException(BLOCKED_ACCOUNT_ERROR(email))
    }
    const userToAttempt: any = await this.usersService.findOneByEmail(email);
if (!userToAttempt) {
  let query = { token: otp, email: email };

  let _otp: any = await this.otpService.findOneByQuery(query);

  switch (emailOrMobile) {
    case "mobile":
      update = { mobile: true };
      break;
    case "email":
      var tokenValidates = speakeasy.totp.verify({
        secret: optSecret,
        encoding: "base32",
        token: otp,
        window: 30,
      });

      if (tokenValidates) {
        update = {
          isVerified: true,
        };

      } else {
        let updateTime: number = Date.now();
        let createDto: any = {
          token: otp,
          email: email
        };
        let createBlockedAccountDto: any = {
          email: email,
          type: BLOCKED_ACCOUNT_TYPE.USER_REGISTRATION
        }
        //if (diff &amp;gt; 0) {
        let _otp: any = await this.otpIncorrectService.create(createDto);
        console.log('otp tokennnnnnnnnn errorrrr', _otp)
        let otpErrorCount: any = await this.otpIncorrectModel.count({ email: email});
        console.log('Otp error count',otpErrorCount, 'If the attempts of failure are greater than 10, block this account. Create blockedCollection.')
        if(otpErrorCount &amp;gt; OTP_RETRY_LIMIT.MAXIMUM_OTP_RETRY_LIMIT){
          let _blocked: any = await this.blockedAccountService.findOneByQuery({email: email, type: BLOCKED_ACCOUNT_TYPE.USER_REGISTRATION})
          if(_blocked == null){
            let _blocked: any = await this.blockedAccountService.create(createBlockedAccountDto);
            //console.log('Your account is added to blocked list. BLOCKED LIST BLOCKED LIST BLOCKED LIST', _blocked);

            await this.emailService.sendEmail(
              new UserCycloanAccountBlockedEmail(
                new EmailDto({
                  to: body.email,
                  metaData: { email, //firstName, lastName 
                  },
                })
              )
            );
            console.log('Blocked Account email sent.................');
          }
          console.log('Your account is added to blocked list. BLOCKED LIST BLOCKED LIST BLOCKED LIST', _blocked);
          throw new BadRequestException(BLOCKED_ACCOUNT_ERROR(email))
        }
        throw new BadRequestException(OTP_ERROR);

      }
      break;
    default:
      throw new BadRequestException(UNKNOWN_PARAM(emailOrMobile));
  }
  let updated = await this.otpService.edit(_otp.id, update, updateTime);
  const _data = plainToClass(OtpDto, updated, {
    excludeExtraneousValues: true,
  });
  return success(_data);
}

}

  &lt;/code&gt;
&lt;/div&gt;
&lt;br&gt;
Link to code: &lt;a href="https://gitlab.com/adh.ranjan/nestjs/-/tree/dSuahailTwo" rel="noopener noreferrer"&gt;https://gitlab.com/adh.ranjan/nestjs/-/tree/dSuahailTwo&lt;/a&gt;

</description>
      <category>node</category>
      <category>mongodb</category>
      <category>nestjs</category>
      <category>speakeasy</category>
    </item>
    <item>
      <title>Custom role based access in Nest.js, MongoDB</title>
      <dc:creator>Krishna Kurtakoti</dc:creator>
      <pubDate>Sat, 31 Oct 2020 06:47:16 +0000</pubDate>
      <link>https://dev.to/krishnakurtakoti/custom-role-based-access-in-nest-js-mongodb-5b3b</link>
      <guid>https://dev.to/krishnakurtakoti/custom-role-based-access-in-nest-js-mongodb-5b3b</guid>
      <description>&lt;p&gt;Here, we have four roles: Sme, Sponsor, Admin, Operations. Initially, we had only 3 roles. The Operations role was added later, and the Operations user has permissions similar to the Admin user. In the code, we had to replace every instance of &lt;strong&gt;if (user.type == USER_TYPES.ADMIN)&lt;/strong&gt; with &lt;strong&gt;if (user.type == USER_TYPES.ADMIN || user.type == USER_TYPES.OPERATIONS)&lt;/strong&gt;. As this is time consuming and we can also miss many instances, we have created a roles module. In the roles module, the roles are defined along with their respective permissions as seen in Code (Part-III). Based on the permissions for each role, we will evaluate the authorization for the user in each of our controller methods. If the user has access, only then will he be granted the resources.&lt;br&gt;
&lt;strong&gt;Code (Part-I):&lt;/strong&gt;&lt;br&gt;
&lt;em&gt;src/common/constants/enum.ts&lt;/em&gt;&lt;br&gt;
&lt;/p&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    export enum USER_TYPES {
  SME = "Sme",
  SPONSOR = "Sponsor",
  ADMIN = "Admin",
  OPERATIONS_TEAM = "Operations"
}

//rolesAccessAction 
export enum ROLES_ACCESS_ACTION {
  USERS_CONTROLLER_FINDLIST_OPERATIONS = "users.controller.findList_operations",
  USERS_CONTROLLER_FINDLIST_ADMIN = "users.controller.findList_admin",
  USERS_CONTROLLER_FIND_ONE = "users.controller.findOne",
  USERS_CONTROLLER_KYC_FILTER = "users.controller.findListFilterKYCStatus",
  USERS_CONTROLLER_USER_STATUS_FILTER = "users.controller.findListFilterUserStatus",
  USERS_CONTROLLER_USER_UPDATE = "users.controller.update",
  USERS_CONTROLLER_DELETE = "users.controller.delete",
  USERS_SERVICE_CHECK_FOR_UPDATE_STATUS_ERROR = "users.service.checkForUpdateStatusError",
  USERS_SERVICE_CREATE = "users.service.create",
  USERS_SERVICE_UPDATE_USER_CRMID_AND_ENTITYDETAILCODE = "users.service.updateUserCrmIdAndEntityDetailCode",
  REMARKS_CONTROLLER_CREATE = "remarks.controller.create",
  REMARKS_CONTROLLER_FINDLIST = "remarks.controller.findList",
  REMARKS_CONTROLLER_FINDLIST_SME = "remarks.controller.findList_sme",
  REMARKS_CONTROLLER_FINDLIST_SPONSOR = "remarks.controller.findList_sponsor",
  SME_PROJECT_CONTROLLER_FINDLIST = "sme-project.controller.findList",
  SME_PROJECT_CONTROLLER_FINDONE = "sme-project.controller.findOne",
  SME_PROJECT_CONTROLLER_RECOMMENDED_PROJECTS = "sme-project.controller.getRecommendedProjects",
  SME_PROJECT_CONTROLLER_CREATE = "sme-project.controller.create",
  SME_PROJECT_CONTROLLER_SPONSOR_FILTER = "sme-project.controller.projectSponsorFilter",
  SME_PROJECT_CONTROLLER_UPDATE = "sme-project.controller.update",
  BID_DETAILS_CONTROLLER_FINDLIST = "bid-details.controller.findList",
  BID_DETAILS_CONTROLLER_FINDLIST_SME = "bid-details.controller.findList_sme",
  BID_DETAILS_CONTROLLER_FINDLIST_SPONSOR = "bid-details.controller.findList_sponsor",
  BID_DETAILS_CONTROLLER_CREATE =  "bid-details.controller.create",
  BID_DETAILS_CONTROLLER_COMPLETE_BID_PROCESS = "bid-details.controller.completeBidProcess",
  BID_DETAILS_CONTROLLER_REJECT_ALL_BIDS_DELETE_PROJECT = "bid-details.controller.rejectAllBidsDeleteProject",
  BID_DETAILS_CONTROLLER_UPDATE = "bid-details.controller.update",
  BID_DETAILS_CONTROLLER_UPDATE_SPONSOR = "bid-details.controller.update_sponsor",
  BID_DETAILS_SERVICE_CALCULATE_BID_DETAILS = "bid-details.controller.calculatebiddetails",
  BID_DETAILS_CONTROLLER_CREATE_TRANSACTION = "bid-details.controller.createTransaction"
  //BID_DETAILS_CONTROLLER_GET_FUNDED_PROJECTS = "bid-details.controller.getfundedProjects"
}

  &lt;/code&gt;
&lt;/div&gt;
&lt;br&gt;
Above, we have defined the rolesAction for each of the methods in our project. The convention used here is the controller/service name of the file followed by the method name. For example, in USERS_CONTROLLER_FINDLIST_OPERATIONS = "users.controller.findList_operations", we have users.controller as the controller name followed by the method name findList.&lt;br&gt;
&lt;strong&gt;Code (Part-II):&lt;/strong&gt;&lt;br&gt;
&lt;em&gt;src/users/users.controller.ts&lt;/em&gt;&lt;br&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    
import {
  Body,
  Controller,
  Param,
  Post,
  UseGuards,
  Get,
  Request,
  Query,
  Put,
  NotFoundException,
  Delete,
  BadRequestException,
} from "@nestjs/common";
import { UsersService } from "./users.service";
import {
  CreateUserDto,
  UpdateUserDto,
  UserDto,
  CloseAccount,
} from "./objects/create-user.dto";
import { abstractBaseControllerFactory } from "../common/base/base.controller";
import { LoggedInToken } from "./objects/login-user.dto";
import {
  BASEROUTES,
  USER_TYPES,
  KYC_VERIFICATION_STATUS,
  USER_STATUS,
  ROLES_ACCESS_ACTION,
} from "../common/constants/enum";
import { JwtAuthGuard } from "../auth/auth.guard";
import {
  RequestUser,
  LastUpdatedTime,
  IdOrCodeParser,
} from "../common/utils/controller.decorator";
import {
  UNKNOWN_PARAM,
  NOT_FOUND,
  PAGE_NOT_FOUND_404,
  NEW_PASSWORD_AND_CONFIRM_NEW_PASSWORD_ERROR,
  USERNAME_OR_PASSWORD_INCORRECT,
  CURRENT_PASSWORD_AND_NEW_PASSWORD_ERROR,
  KYC_PENDING_STATUS_CHANGE_ERROR,
  KYC_APPROVED_STATUS_CHANGE_ERROR,
  KYC_REJECTED_STATUS_CHANGE_ERROR,
  USER_ACTIVE_STATUS_CHANGE_ERROR,
  USER_CLOSED_STATUS_CHANGE_ERROR,
  USER_IN_REVIEW_STATUS_CHANGE_ERROR,
  USER_KYC_INCOMPLETE_STATUS_CHANGE_ERROR,
  ONLY_FOR_ADMIN,
} from "../common/constants/string";
import { plainToClass } from "class-transformer";
import { success } from "../common/base/httpResponse.interface";
import { AbstractClassTransformerPipe } from "../common/pipes/class-transformer.pipe";
import { normalizeObject } from "../common/utils/helper";
import * as _ from "lodash";
import { InjectModel } from "@nestjs/mongoose";
import { Model } from "mongoose";
import { IUser, User } from "./objects/user.schema";
import { CrmService } from "./crm/crm.service";
import { KycPendingEmail } from "./objects/user.registered.email";
import { EmailDto } from "../email/objects/email.dto";
import { normalizePaginateResult } from "../common/interfaces/pagination";
import { RolesService } from "../roles/roles.service";

const BaseController = abstractBaseControllerFactory({
  DTO: UserDto,
  DisabledRoutes: [
    BASEROUTES.PATCH,
    //, BASEROUTES.DETELEONE
  ],
});

@Controller("users")
export class UsersController extends BaseController {
  constructor(
    private usersService: UsersService,
    private crmService: CrmService,
    @InjectModel("User") private readonly usersModel: Model,
    private rolesservice: RolesService
  ) {
    super(usersService);
  }

@UseGuards(JwtAuthGuard)
  @Get()
  async findList(@Request() req, @Query() query, @RequestUser() user) {
   let _user = await this.rolesservice.findOneByQuery({roleName: user.type})
    // if(user.type == USER_TYPES.OPERATIONS_TEAM){
    let hasAccessOperations = _user.rolesAccessAction.some(
      (e) =&amp;gt; e === ROLES_ACCESS_ACTION.USERS_CONTROLLER_FINDLIST_OPERATIONS
    );
    let hasAccessAdmin = _user.rolesAccessAction.some(
      (e) =&amp;gt; e === ROLES_ACCESS_ACTION.USERS_CONTROLLER_FINDLIST_ADMIN
    );
if(hasAccessOperations) {
  console.log('userstype', user.type, _user, hasAccessOperations, hasAccessAdmin)
  let t = { $or: [{ type: USER_TYPES.SME }, { type: USER_TYPES.SPONSOR }] };
  return await super.findList(req, { ...query, ...t });
}
//if (user.isAdmin) {
  if (hasAccessAdmin){
  // &amp;lt;--- only admin can see the user lists
  return await super.findList(req, { ...query });
}
throw new NotFoundException();

}

@UseGuards(JwtAuthGuard)
  @Get(":idOrCode")
  async findOne(
    @IdOrCodeParser("idOrCode") idOrCode: string,
    @RequestUser() user
  ) {
    let _user = await this.rolesservice.findOneByQuery({roleName: user.type});
    console.log('userstype', user.type, _user)
    let hasAccess = _user.rolesAccessAction.some(
      (e) =&amp;gt; e === ROLES_ACCESS_ACTION.USERS_CONTROLLER_FIND_ONE
    );
if (hasAccess || user.code === idOrCode || user.id === idOrCode) {
  // &amp;lt;--- only admin or the same person can view a profile
  return await super.findOne(idOrCode);
}
throw new NotFoundException();

}

@UseGuards(JwtAuthGuard)
  @Post("filter/kycFilter")
  async findListFilterKYCStatus(
    @Request() req,
    @Query() query,
    @RequestUser() user,
    @Body() body: { isProfileCompleted: number }
  ) {
    let t = { $or: [{ type: USER_TYPES.SME }, { type: USER_TYPES.SPONSOR }] };
    //if (user.type == USER_TYPES.ADMIN) {
      let _user = await this.rolesservice.findOneByQuery({roleName: user.type});
      console.log('userstype', user.type, _user)
      let hasAccess = _user.rolesAccessAction.some(
        (e) =&amp;gt; e === ROLES_ACCESS_ACTION.USERS_CONTROLLER_KYC_FILTER
      );
     // console.log('userstype', user.type, _user, hasAccess)
   if(hasAccess) {
    var options = {
        limit: 30,
        page: 1,
        sort: "_id",
        skip: query.page ? (query.page - 1) : 0
      };
  // &amp;lt;--- only admin and Sponsor can see all the USER lists
  console.log("filterrrrrrrrrrr", query, query.page, body.isProfileCompleted);

  let d = await this.usersModel.find(
    { "verification.isProfileCompleted": body.isProfileCompleted, ...t },
    {},
    { sort: { _id: 1 }, skip: options.skip * options.limit, limit: options.limit, projection: {} }
  );
  let dCount = await this.usersModel.count(
    { "verification.isProfileCompleted": body.isProfileCompleted }
  );
  console.log(d.length, dCount);
  await d.map((data) =&amp;gt; {
    return plainToClass(UserDto, data, { excludeExtraneousValues: true });
  });
  let pagination = normalizePaginateResult({
    total: dCount,//d.length,
    limit: options.limit,
    page: options.page,
    pages: d.pages,
  });

  return success({ d, pagination });
}
throw new NotFoundException();

}

  &lt;/code&gt;
&lt;/div&gt;
&lt;br&gt;
In the 3 methods listed above, findList, findOne, findListFilterKYCStatus, we are checking if the user has access/authorization. For the method findListFilterKYCStatus &lt;strong&gt;let hasAccess = _user.rolesAccessAction.some(&lt;br&gt;
        (e) =&amp;gt; e === ROLES_ACCESS_ACTION.USERS_CONTROLLER_KYC_FILTER&lt;br&gt;
      );&lt;/strong&gt;, we are checking if the user has &lt;strong&gt;ROLES_ACCESS_ACTION.USERS_CONTROLLER_KYC_FILTER&lt;/strong&gt; listed in his roles Schema as shown below in the file. Here, only users of type &lt;strong&gt;USER_TYPES.OPERATIONS_TEAM&lt;/strong&gt; and &lt;strong&gt;USER_TYPES.ADMIN&lt;/strong&gt; have the permissions, and only they are allowed access to the findListFilterKYCStatus() method. &lt;br&gt;
Code (Part-III):&lt;br&gt;
&lt;em&gt;src/roles/roles.controller.ts&lt;/em&gt;&lt;br&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    
import { RolesDto, CreateRolesDto } from './objects/roles.dto';
import { abstractBaseControllerFactory } from '../common/base/base.controller';
import { BASEROUTES, USER_TYPES, ROLES_ACCESS_ACTION } from '../common/constants/enum';
import { RolesService } from './roles.service';
import { JwtAuthGuard } from '../auth/auth.guard';
import {
  Controller,
  Get,
  UseGuards,
  Request,
  Query,
  Put,
  Body,
  Post,
  BadRequestException,
  NotFoundException,
  Delete,
} from "@nestjs/common";
import { AbstractClassTransformerPipe } from '../common/pipes/class-transformer.pipe';
import { RequestUser } from '../common/utils/controller.decorator';
import { plainToClass } from 'class-transformer';
import { success } from '../common/base/httpResponse.interface';

const BaseController = abstractBaseControllerFactory({
    DTO: RolesDto,
    //Todo: Remove after creating records in Db.
    CreateDTO: CreateRolesDto,
    DisabledRoutes: [
      //Todo: Uncomment BASEROUTES.CREATE after creating records in Db.
      // BASEROUTES.CREATE,
     // BASEROUTES.DETELEONE,
  BASEROUTES.PATCH,
 // BASEROUTES.UPDATEONE,
],

});

@UseGuards(JwtAuthGuard)
@Controller('roles')
export class RolesController extends BaseController {
    constructor(private rolesservice: RolesService) {
        super(rolesservice);
      }
      @Post()
      public async create(
        @Request() req,
        @Body(AbstractClassTransformerPipe(CreateRolesDto)) body: any,
        @Query() query,
        @RequestUser() user
      ) {
         switch(body.roleName){
           case USER_TYPES.ADMIN:
             body.rolesAccessAction = [
              ROLES_ACCESS_ACTION.USERS_CONTROLLER_FINDLIST_ADMIN,
              ROLES_ACCESS_ACTION.USERS_CONTROLLER_FIND_ONE,
              ROLES_ACCESS_ACTION.USERS_CONTROLLER_KYC_FILTER,
              ROLES_ACCESS_ACTION.USERS_CONTROLLER_USER_STATUS_FILTER,
              ROLES_ACCESS_ACTION.USERS_CONTROLLER_USER_UPDATE,
              ROLES_ACCESS_ACTION.USERS_CONTROLLER_DELETE,
              ROLES_ACCESS_ACTION.USERS_SERVICE_CHECK_FOR_UPDATE_STATUS_ERROR,
              ROLES_ACCESS_ACTION.USERS_SERVICE_CREATE,
              ROLES_ACCESS_ACTION.USERS_SERVICE_UPDATE_USER_CRMID_AND_ENTITYDETAILCODE,
              ROLES_ACCESS_ACTION.REMARKS_CONTROLLER_CREATE,
              ROLES_ACCESS_ACTION.REMARKS_CONTROLLER_FINDLIST,
              ROLES_ACCESS_ACTION.SME_PROJECT_CONTROLLER_FINDLIST,
              ROLES_ACCESS_ACTION.SME_PROJECT_CONTROLLER_FINDONE,
              ROLES_ACCESS_ACTION.SME_PROJECT_CONTROLLER_UPDATE,
              ROLES_ACCESS_ACTION.BID_DETAILS_CONTROLLER_FINDLIST,
              ROLES_ACCESS_ACTION.BID_DETAILS_CONTROLLER_COMPLETE_BID_PROCESS,
              ROLES_ACCESS_ACTION.BID_DETAILS_CONTROLLER_REJECT_ALL_BIDS_DELETE_PROJECT,
              ROLES_ACCESS_ACTION.BID_DETAILS_CONTROLLER_UPDATE,
              ROLES_ACCESS_ACTION.BID_DETAILS_CONTROLLER_CREATE_TRANSACTION
             ];
             break;
           case USER_TYPES.OPERATIONS_TEAM:
            body.rolesAccessAction = [
              ROLES_ACCESS_ACTION.USERS_CONTROLLER_FINDLIST_OPERATIONS,
              ROLES_ACCESS_ACTION.USERS_CONTROLLER_FIND_ONE,
              ROLES_ACCESS_ACTION.USERS_CONTROLLER_KYC_FILTER,
              ROLES_ACCESS_ACTION.USERS_CONTROLLER_USER_STATUS_FILTER,
              ROLES_ACCESS_ACTION.USERS_CONTROLLER_USER_UPDATE,
              ROLES_ACCESS_ACTION.USERS_CONTROLLER_DELETE,
              ROLES_ACCESS_ACTION.USERS_SERVICE_CHECK_FOR_UPDATE_STATUS_ERROR,
              ROLES_ACCESS_ACTION.USERS_SERVICE_CREATE,
              ROLES_ACCESS_ACTION.USERS_SERVICE_UPDATE_USER_CRMID_AND_ENTITYDETAILCODE,
              ROLES_ACCESS_ACTION.REMARKS_CONTROLLER_CREATE,
              ROLES_ACCESS_ACTION.REMARKS_CONTROLLER_FINDLIST,
              ROLES_ACCESS_ACTION.SME_PROJECT_CONTROLLER_FINDLIST,
              ROLES_ACCESS_ACTION.SME_PROJECT_CONTROLLER_FINDONE,
              ROLES_ACCESS_ACTION.SME_PROJECT_CONTROLLER_UPDATE,
              ROLES_ACCESS_ACTION.BID_DETAILS_CONTROLLER_FINDLIST,
              ROLES_ACCESS_ACTION.BID_DETAILS_CONTROLLER_COMPLETE_BID_PROCESS,
              ROLES_ACCESS_ACTION.BID_DETAILS_CONTROLLER_REJECT_ALL_BIDS_DELETE_PROJECT,
              ROLES_ACCESS_ACTION.BID_DETAILS_CONTROLLER_UPDATE,
              ROLES_ACCESS_ACTION.BID_DETAILS_CONTROLLER_CREATE_TRANSACTION
            ];
             break;
           case USER_TYPES.SME:
            body.rolesAccessAction = [
              ROLES_ACCESS_ACTION.USERS_SERVICE_UPDATE_USER_CRMID_AND_ENTITYDETAILCODE,
              ROLES_ACCESS_ACTION.REMARKS_CONTROLLER_FINDLIST_SME,
              ROLES_ACCESS_ACTION.SME_PROJECT_CONTROLLER_CREATE,
              ROLES_ACCESS_ACTION.BID_DETAILS_CONTROLLER_FINDLIST_SME
            ];
         break;
       case USER_TYPES.SPONSOR:
        body.rolesAccessAction = [
          ROLES_ACCESS_ACTION.USERS_SERVICE_UPDATE_USER_CRMID_AND_ENTITYDETAILCODE,
          ROLES_ACCESS_ACTION.REMARKS_CONTROLLER_FINDLIST_SPONSOR,
          ROLES_ACCESS_ACTION.SME_PROJECT_CONTROLLER_FINDLIST,
          ROLES_ACCESS_ACTION.SME_PROJECT_CONTROLLER_FINDONE,
          ROLES_ACCESS_ACTION.SME_PROJECT_CONTROLLER_RECOMMENDED_PROJECTS,
          ROLES_ACCESS_ACTION.SME_PROJECT_CONTROLLER_SPONSOR_FILTER,
          ROLES_ACCESS_ACTION.BID_DETAILS_CONTROLLER_FINDLIST_SPONSOR,
          ROLES_ACCESS_ACTION.BID_DETAILS_CONTROLLER_CREATE,
          ROLES_ACCESS_ACTION.BID_DETAILS_CONTROLLER_UPDATE_SPONSOR,
          ROLES_ACCESS_ACTION.BID_DETAILS_SERVICE_CALCULATE_BID_DETAILS
        ];
         break; 
      } 
      let roles =  await this.rolesservice.create(body);
      const _data = plainToClass(RolesDto, roles, { excludeExtraneousValues: true });
      return success(_data);
  }

}


  &lt;/code&gt;
&lt;/div&gt;
&lt;br&gt;
The role's permissions are stored in the backend (MongoDB)&lt;br&gt;
Code (Part-IV):&lt;br&gt;
&lt;em&gt;src/roles/objects/roles.schema.ts&lt;/em&gt;&lt;br&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    
import { Schema } from "mongoose";
import { createModel, Entity, IEntity } from "../../common/base/base.model";

export class Roles extends Entity {
  roleName: string;
  roleCode: string;
  type: string;
  rolesAccessAction: string[];
}

export interface IRoles extends Roles, IEntity {
  id: string;
}

export const RolesSchema: Schema = createModel("AdminRoles", {
  roleName: { type: String, required: true },
  roleCode: { type: String, required: true },
  type: { type: String, required: true},
  rolesAccessAction: [
    {
      type: String,
    },
  ]
});

  &lt;/code&gt;
&lt;/div&gt;
&lt;br&gt;
This is how custom role based access is implemented without any 3rd party libraries.&lt;br&gt;
Source code: &lt;a href="https://gitlab.com/adh.ranjan/nestjs" rel="noopener noreferrer"&gt;https://gitlab.com/adh.ranjan/nestjs&lt;/a&gt;

</description>
      <category>nestjs</category>
      <category>mongodb</category>
      <category>typescript</category>
      <category>mongoose</category>
    </item>
    <item>
      <title>Multiple search filter using Nestjs, mongoose.</title>
      <dc:creator>Krishna Kurtakoti</dc:creator>
      <pubDate>Wed, 07 Oct 2020 09:36:18 +0000</pubDate>
      <link>https://dev.to/krishnakurtakoti/multiple-search-filter-using-nestjs-mongoose-77e</link>
      <guid>https://dev.to/krishnakurtakoti/multiple-search-filter-using-nestjs-mongoose-77e</guid>
      <description>&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;The fields are all optional.The user can search for results using multiple fields given below:
1. Select Country
2. Project Type
3. Practice Area
4. Investment Range - Less than /Greater than
5. Project tenure
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;
&lt;p&gt;Here, the fields are passed as query parameters and also as body in the Post Http request. Tenure, country, and fundsRequired are passed in the body JSON, while practiceArea and type are sent as query parameters.&lt;br&gt;
POST &lt;a href="http://localhost:3000/api/v1/sme-project/filter/projectTenureFilter?practiceArea=Hardware&amp;amp;type=Private" rel="noopener noreferrer"&gt;http://localhost:3000/api/v1/sme-project/filter/projectTenureFilter?practiceArea=Hardware&amp;amp;type=Private&lt;/a&gt;&lt;/p&gt;
&lt;div class="highlight js-code-highlight"&gt;
&lt;pre class="highlight plaintext"&gt;&lt;code&gt;    "country": "Australia",
    "fundsRequired": 20000,
    "lessThanFundsRequired": false
}
&lt;/code&gt;&lt;/pre&gt;

&lt;/div&gt;
&lt;p&gt;a. If tenure is not empty, lines 22 to 35 are executed.&lt;br&gt;
b. If practiceArea is not empty, lines 37 to 39 are executed.&lt;br&gt;
c. If type is not empty, lines 40 to 42 are executed.&lt;br&gt;
d. If fundsRequired is not empty, lines 44 to 52 are executed.&lt;br&gt;
e. If country field is not empty, lines 55 to 85 are executed.&lt;br&gt;
Like this, the query object is constructed for only the fields that are entered by the user in the search. This is how the multiple search query is built, and mongoose is used for the query. Pagination is also done for the search results.&lt;br&gt;
&lt;strong&gt;Code (Part-I):&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    import {
  Controller,
  Get,
  UseGuards,
  Request,
  Query,
  Put,
  Body,
  Post,
  BadRequestException,
  NotFoundException,
  Delete,
} from "@nestjs/common";
import {
  SmeProjectDto,
  CreateSmeProjectDto,
  UpdateSmeProjectDto,
} from "./objects/sme-project.dto";
import {
  BASEROUTES,
  USER_TYPES,
  ACTIVITY_FEED_EVENTS_SME,
  ACTIVITY_FEED_EVENTS_SPONSOR,
  PROJECT_STATUS,
  Status,
  KYC_VERIFICATION_STATUS,
  PROJECT_TENURE,
  TIMELINE_PREFERENCE_0,
} from "../common/constants/enum";
import { abstractBaseControllerFactory } from "../common/base/base.controller";
import { SmeProjectService } from "./sme-project.service";
import {
  RequestUser,
  IdOrCodeParser,
} from "../common/utils/controller.decorator";
import { JwtAuthGuard } from "../auth/auth.guard";
import { success } from "../common/base/httpResponse.interface";
import { AbstractClassTransformerPipe } from "../common/pipes/class-transformer.pipe";
import {
  ONLY_FOR_SME,
  PROJECT_EDIT_ERROR,
  PROJECT_APPROVED_STATUS_CHANGE_ERROR,
  PROJECT_FUNDED_STATUS_CHANGE_ERROR,
  PROJECT_COMPLETED_STATUS_CHANGE_ERROR,
  PROJECT_PENDING_STATUS_CHANGE_ERROR,
  PROJECT_REJECTED_STATUS_CHANGE_ERROR,
  PROJECT_DOCUMENTS_REQUIRED_STATUS_CHANGE_ERROR,
  ONLY_FOR_CREATED_BY_SME_OR_ADMIN,
  PROJECT_APPROVED_STATUS_CHANGE_ERROR1,
  FUNDS_REQUESTED_ERROR,
  CANNOT_DELETE_FUNDED_PROJECTS,
  PROJECT_PENDING_STATUS_CHANGE_ERROR_SME_KYC_NOT_APPROVED,
  ONLY_FOR_SPONSOR,
  USER_PREFERENCES_NOT_FILLED,
} from "../common/constants/string";
import { SmeActivityFeedService } from "../sme-activity-feed/sme-activity-feed.service";
import { use } from "passport";
import { ActivityFeedService } from "../activity-feed/activity-feed.service";
import { InjectModel } from "@nestjs/mongoose";
import { Model } from "mongoose";
import { ISmeProject } from "./objects/sme-project.schema";
import { BidDetailsService } from "../bid-details/bid-details.service";
import { IBidDetails } from "../bid-details/objects/bid-details.schema";
import { UsersService } from "../users/users.service";
import { plainToClass } from "class-transformer";
import { normalizePaginateResult } from "../common/interfaces/pagination";
import { IEntityDetails } from "../entity-details/objects/entity-details.schema";
import { IFavouriteProjects } from "../favourite-projects/objects/favourite-projects.schema";

const BaseController = abstractBaseControllerFactory({
  DTO: SmeProjectDto,
  CreateDTO: CreateSmeProjectDto,
  UpdateDTO: UpdateSmeProjectDto,
  DisabledRoutes: [
    //BASEROUTES.DETELEONE,
    BASEROUTES.PATCH,
  ],
});

@UseGuards(JwtAuthGuard)
@Controller("sme-project")
export class SmeProjectController extends BaseController {
  constructor(
    private smeProjectService: SmeProjectService,
    private smeactivityFeedService: SmeActivityFeedService,
    private activityFeedService: ActivityFeedService,
    private usersService: UsersService,
    //  private bidDetailsService: BidDetailsService,
    //  @InjectModel("BidDetails")
    //  private readonly bidDetailsModel: Model,
    @InjectModel("SmeProject")
    private readonly smeProjectModel: Model,
    @InjectModel("EntityDetails")
    private readonly entityDetailsModel: Model,
    @InjectModel("FavouriteProject")
    private readonly favouriteProjectsModel: Model,
  ) {
    super(smeProjectService);
  }
    @UseGuards(JwtAuthGuard)
    @Post("filter/projectSponsorFilter")
    async findListFilterKYCStatus(
      @Request() req,
      @Query() query,
      @RequestUser() user,
      @Body() body: { tenure: string; country: string; fundsRequired: number; lessThanFundsRequired: boolean; }
    ) {
      if (user.type == USER_TYPES.ADMIN || user.type == USER_TYPES.SPONSOR) {
    var options = {
      limit: 3,
      page: 1,
      sort: "_id",
      skip: query.page ? (query.page - 1) : 0
    };
    let d: any = [];
    let _q: any;

    if (body.tenure !== undefined) {
      switch(body.tenure){
        case(TIMELINE_PREFERENCE_0.LESS_THAN_SIX_MONTHS):
          _q = { tenure: { $lt: 6 } };
          break;
        case(TIMELINE_PREFERENCE_0.SIX_MONTHS_TO_ONE_YEAR):
          let t = { $and: [{ tenure: { $lte: 12 } }, { tenure: { $gt: 6 } }] };
          _q = { ...t, ..._q };
          break;
        case(TIMELINE_PREFERENCE_0.GREATER_THAN_ONE_YEAR):
          _q = { tenure: { $gt: 12 } };
          break;

      }
    }

    if (query.practiceArea !== undefined) {
      _q = { practiceArea: query.practiceArea, ..._q };
    }
    if (query.type !== undefined) {
      _q = { type: query.type, ..._q };
    }

    if (body.fundsRequired !== undefined &amp;amp;&amp;amp; body.lessThanFundsRequired !== undefined) {
      switch(body.lessThanFundsRequired){
        case false:
          _q = { fundsRequired: { $gt: body.fundsRequired } };
          break;
        case true:
          _q = { fundsRequired: { $lte: body.fundsRequired } }; 
      }
    }

    if (body.country !== undefined) {
      let projectId: string;
      var userIdsArray: string[] = [];
      var smeUserIdsArray: string[] = [];
      let _entityDetails = await this.entityDetailsModel.find(
        { "company.address.country": body.country },
        {},
        {
          sort: { _id: 1 },
          //skip: 0,
          // limit: 30,
          projection: {},
        }
      );

      await Promise.all(
        _entityDetails.map(async (element) =&amp;gt; {
          await userIdsArray.push(element.createdBy);
          console.log("userssssssssArray", userIdsArray);
        })
      );
      await Promise.all(
        userIdsArray.map(async (element) =&amp;gt; {
          let k = await this.usersService.findOne(element);
          if (k.type == USER_TYPES.SME) {
            await smeUserIdsArray.push(k.code);
          }
        })
      );
      let q = { createdBy: smeUserIdsArray };
      _q = { ...q, ..._q };
    }
    _q = { status: PROJECT_STATUS.APPROVED, ..._q };

    d = await this.smeProjectModel.find(
      _q,
      {},  { sort: { _id: 1 },
      skip: options.skip * options.limit, limit: options.limit,projection: {}}
    );

    let dCount = await this.smeProjectModel.count(
      _q,
    );


    var projectIdsArray: string[] = [];
    await d.map(async (data) =&amp;gt; {
      await projectIdsArray.push(data.code);
    });
    let q = { code: projectIdsArray}
    let k = await this.findList(req, q, user );
    let pagination = normalizePaginateResult({
      total: dCount,//d.length,
      limit: options.limit,
      page: query.page,//options.page,
      pages: d.pages,
    });
    return { k, ...pagination };


  }
}

}

  &lt;/code&gt;
&lt;/div&gt;

&lt;p&gt;&lt;br&gt;&lt;br&gt;
  &lt;a href="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Ffgqw96n0okeym6l0a3hf.png" class="article-body-image-wrapper"&gt;&lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fi%2Ffgqw96n0okeym6l0a3hf.png" alt="Screenshot from 2020-10-07 15-01-46cycloanHundredTwenty" width="800" height="449"&gt;&lt;/a&gt;&lt;/p&gt;

</description>
      <category>nestjs</category>
      <category>typescript</category>
      <category>mongoose</category>
      <category>mongodb</category>
    </item>
    <item>
      <title>Dependency Injection in Nest.js</title>
      <dc:creator>Krishna Kurtakoti</dc:creator>
      <pubDate>Mon, 22 Jun 2020 08:54:26 +0000</pubDate>
      <link>https://dev.to/krishnakurtakoti/dependency-injection-in-nest-js-2a7j</link>
      <guid>https://dev.to/krishnakurtakoti/dependency-injection-in-nest-js-2a7j</guid>
      <description>&lt;p&gt;Dependency Injection&lt;/p&gt;

&lt;p&gt;We have a project module and a bid module.&lt;br&gt;
The project record(in MongoDB) is created by user of type "Sme" and &lt;br&gt;
the bid record(in MongoDB) is created by user of type "Sponsor".&lt;br&gt;
A &lt;strong&gt;Sponsor&lt;/strong&gt; can &lt;strong&gt;bid&lt;/strong&gt; for a &lt;strong&gt;project created&lt;/strong&gt; by &lt;strong&gt;Sme&lt;/strong&gt;.&lt;/p&gt;

&lt;p&gt;1. The Project module is shown below with the path &lt;br&gt;
&lt;em&gt;src/sme-project/sme-project.module.ts.&lt;/em&gt;&lt;br&gt;
The BidDetailsModule is included within this SmeProjectModule.&lt;br&gt;
Note here that the &lt;strong&gt;SmeProjectService class&lt;/strong&gt; is &lt;strong&gt;exported&lt;/strong&gt; from &lt;strong&gt;this module&lt;/strong&gt; and can be &lt;strong&gt;injected&lt;/strong&gt; in other modules.&lt;br&gt;
The &lt;strong&gt;SmeProjectService&lt;/strong&gt; class is injected in the &lt;strong&gt;BidDetailsService&lt;/strong&gt; class in the later part.&lt;br&gt;
&lt;strong&gt;Code (Part-I):&lt;/strong&gt;&lt;br&gt;
&lt;/p&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    
import { Module } from "@nestjs/common";
import { SmeProjectController } from "./sme-project.controller";
import { SmeProjectService } from "./sme-project.service";
import { MongooseModule } from "@nestjs/mongoose";
import { SmeProjectSchema } from "./objects/sme-project.schema";
import { ProjectComplexityModule } from "../project-complexity/project-complexity.module";
import { PracticeAreaModule } from "../practice-area/practice-area.module";
import { MilestoneModule } from "../milestone/milestone.module";
import { RemarksModule } from "../remarks/remarks.module";
import { BidDetailsModule } from "../bid-details/bid-details.module";
import { ProjectStatusModule } from "../project-status/project-status.module";

@Module({
  imports: [
    MongooseModule.forFeature([
      { name: "SmeProject", schema: SmeProjectSchema },
    ]),
    ProjectComplexityModule,
    PracticeAreaModule,
    MilestoneModule,
    ProjectStatusModule,
    RemarksModule,
    BidDetailsModule
  ],
exports: [SmeProjectService],
  controllers: [SmeProjectController],
  providers: [SmeProjectService],
})
export class SmeProjectModule {}

  &lt;/code&gt;
&lt;/div&gt;
&lt;br&gt;
The &lt;strong&gt;BidDetailsModule&lt;/strong&gt; module is shown below with the path &lt;em&gt;src/bid-details/bid-details.module.ts.&lt;/em&gt;&lt;br&gt;
&lt;strong&gt;Code(Part-II):&lt;/strong&gt;&lt;br&gt;
&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    
import { Module, forwardRef } from '@nestjs/common';
import { BidDetailsController } from './bid-details.controller';
import { BidDetailsService } from './bid-details.service';
import { BidDetailsSchema } from './objects/bid-details.schema';
import { MongooseModule } from '@nestjs/mongoose';
import { WalletTransactionsService } from '../wallet-transactions/wallet-transactions.service';
import { WalletTransactionsModule } from '../wallet-transactions/wallet-transactions.module';
import { WalletTransactionsSchema } from '../wallet-transactions/objects/wallet-transactions.schema';
import { UsersModule } from '../users/users.module';
import { UserSchema } from '../users/objects/user.schema';
import { SmeProjectSchema } from '../sme-project/objects/sme-project.schema';
import { SmeProjectModule } from '../sme-project/sme-project.module';
import { SmeProjectService } from '../sme-project/sme-project.service';

@Module({
  imports: [
    MongooseModule.forFeature([
      { name: "BidDetails", schema: BidDetailsSchema },
      { name: "WalletTransaction", schema: WalletTransactionsSchema },
      { name: "User", schema: UserSchema },
      { name: "SmeProject", schema: SmeProjectSchema },
    ]),
    WalletTransactionsModule,
    forwardRef(() =&amp;gt; UsersModule),
    forwardRef(() =&amp;gt; SmeProjectModule),
    //SmeProjectModule,
  ],
  exports: [BidDetailsService, WalletTransactionsService],
  controllers: [BidDetailsController],
  providers: [BidDetailsService, WalletTransactionsService,SmeProjectService]
})
export class BidDetailsModule {}

  &lt;/code&gt;
&lt;/div&gt;
&lt;br&gt;
We need to inject the &lt;strong&gt;SmeProjectService&lt;/strong&gt; class in the &lt;strong&gt;BidDetailsService&lt;/strong&gt; class in the &lt;em&gt;constructor&lt;/em&gt; as shown below. So we have imported SmeProjectModule inside BidDetailsModule. We have also &lt;strong&gt;included&lt;/strong&gt; the &lt;strong&gt;SmeProjectService&lt;/strong&gt; in the &lt;strong&gt;providers&lt;/strong&gt; of this &lt;strong&gt;BidDetailsModule&lt;/strong&gt;.&lt;br&gt;
Since &lt;strong&gt;BidDetailsModule&lt;/strong&gt; is already included in the &lt;strong&gt;SmeProjectModule&lt;/strong&gt; Code(Part-I), importing it directly will lead to a &lt;strong&gt;circular dependency&lt;/strong&gt;.&lt;br&gt;
We use &lt;strong&gt;forwardRef(() =&amp;gt; SmeProjectModule)&lt;/strong&gt; to solve this in place of just SmeProjectModule.

&lt;p&gt;The &lt;strong&gt;BidDetailsService&lt;/strong&gt; class is shown below with the path &lt;em&gt;src/bid-details/bid-details.service.ts.&lt;/em&gt;&lt;br&gt;
&lt;strong&gt;Code(Part-III):&lt;/strong&gt;&lt;/p&gt;


&lt;div class="runkit-element"&gt;
  &lt;code&gt;
    
// hidden setup JavaScript code goes in this preamble area
const hiddenVar = 42

  &lt;/code&gt;
  &lt;code&gt;
    
// visible, reader-editable JavaScript code goes here
import { Injectable } from '@nestjs/common';
import { InjectModel } from '@nestjs/mongoose';
import { Model } from "mongoose";
import { BaseService } from '../common/base/base.service';
import { IBidDetails } from './objects/bid-details.schema';
import { CreateWalletTransactionDto } from '../wallet-transactions/objects/wallet-transactions.dto';
import { TRANSACTION_TYPE, USER_TYPES } from '../common/constants/enum';
import { WalletTransactionsService } from '../wallet-transactions/wallet-transactions.service';
import { UsersService } from '../users/users.service';
import { EXISTS, TOKENS_FOR_SUBMISSION_ERROR } from '../common/constants/string';
import { IUser } from '../users/objects/user.schema';
import { SmeProjectService } from '../sme-project/sme-project.service';

@Injectable()
export class BidDetailsService extends BaseService {
    constructor(
        @InjectModel("BidDetails") private readonly bidDetailsModel: Model,
       private walletTransactionsService: WalletTransactionsService,
       private usersService: UsersService,
       private smeProjectService: SmeProjectService,
      ) {
        super(bidDetailsModel);
      }
     //Details of code...
}


  &lt;/code&gt;
&lt;/div&gt;
&lt;br&gt;
This is a sample of how dependency injection can be handled in Nest.js.&lt;br&gt;
My Github Profile for code:&lt;br&gt;
Please see the develop branch in my repo.&lt;br&gt;
[Link]&lt;a href="https://github.com/krishnakurtakoti/nestTwo" rel="noopener noreferrer"&gt;https://github.com/krishnakurtakoti/nestTwo&lt;/a&gt;&lt;br&gt;
&lt;div class="ltag__user ltag__user__id__376617"&gt;
    &lt;a href="/krishnakurtakoti" class="ltag__user__link profile-image-link"&gt;
      &lt;div class="ltag__user__pic"&gt;
        &lt;img src="https://media2.dev.to/dynamic/image/width=800%2Cheight=%2Cfit=scale-down%2Cgravity=auto%2Cformat=auto/https%3A%2F%2Fdev-to-uploads.s3.amazonaws.com%2Fuploads%2Fuser%2Fprofile_image%2F376617%2Fa80c611f-bcdc-47de-a558-eff950ef49a0.png" alt="krishnakurtakoti image"&gt;
      &lt;/div&gt;
    &lt;/a&gt;
  &lt;div class="ltag__user__content"&gt;
    &lt;h2&gt;
&lt;a class="ltag__user__link" href="/krishnakurtakoti"&gt;Krishna Kurtakoti&lt;/a&gt;Follow
&lt;/h2&gt;
    &lt;div class="ltag__user__summary"&gt;
      &lt;a class="ltag__user__link" href="/krishnakurtakoti"&gt;Microservices Application developer&lt;/a&gt;
    &lt;/div&gt;
  &lt;/div&gt;
&lt;/div&gt;


</description>
      <category>nestjs</category>
      <category>mongodb</category>
      <category>node</category>
      <category>typescript</category>
    </item>
    <item>
      <title>Web Backend App: Adding a formatted Id to a document using Mongoose virtuals with MongoDB,Nest.js</title>
      <dc:creator>Krishna Kurtakoti</dc:creator>
      <pubDate>Wed, 10 Jun 2020 08:06:52 +0000</pubDate>
      <link>https://dev.to/krishnakurtakoti/web-backend-app-adding-a-formatted-id-to-a-document-using-mongoose-virtuals-with-mongodb-nest-js-4l7p</link>
      <guid>https://dev.to/krishnakurtakoti/web-backend-app-adding-a-formatted-id-to-a-document-using-mongoose-virtuals-with-mongodb-nest-js-4l7p</guid>
      <description>&lt;p&gt;Project ID format – PR0001&lt;/p&gt;

&lt;p&gt;The Reader is a class which returns the JSON response object.&lt;br&gt;
The field "_formattedId" is a virtual field which does not exist in Database.&lt;br&gt;
Instead, a number field "formattedId" stores the index of the Project.&lt;br&gt;
So, if the "formattedId" value is 1, 10, or 100, the value of the "_formattedId" will be "PR0001", "PR0010", "PR0100".&lt;br&gt;
These values are created by passing the "formattedId" values to the pad function with width and z as additional arguments.&lt;br&gt;
[link]&lt;br&gt;
Link: &lt;a href="https://stackoverflow.com/questions/10073699/pad-a-number-with-leading-zeros-in-javascript" rel="noopener noreferrer"&gt;https://stackoverflow.com/questions/10073699/pad-a-number-with-leading-zeros-in-javascript&lt;/a&gt;&lt;br&gt;
&lt;strong&gt;Code:&lt;/strong&gt;&lt;br&gt;
&lt;em&gt;src\sme-project\objects\sme-project.dto.ts&lt;/em&gt;&lt;br&gt;
import { Expose, Type } from "class-transformer";&lt;br&gt;
import {&lt;br&gt;
  IsNotEmpty,&lt;br&gt;
  IsDefined,&lt;br&gt;
  MaxLength,&lt;br&gt;
  IsOptional,&lt;br&gt;
  ValidateNested,&lt;br&gt;
  IsString,&lt;br&gt;
  IsIn,&lt;br&gt;
  IsInt,&lt;br&gt;
} from "class-validator";&lt;br&gt;
import { Reader, Creator, Updater } from "../../common/base/base.dto";&lt;/p&gt;

&lt;p&gt;export class SmeProjectDto extends Reader {&lt;/p&gt;

&lt;p&gt;@Expose()&lt;br&gt;
  readonly &lt;strong&gt;_formattedId: string = "";&lt;/strong&gt;&lt;/p&gt;

&lt;p&gt;}&lt;/p&gt;

&lt;p&gt;export class CreateSmeProjectDto extends Creator {&lt;br&gt;
  constructor() {&lt;br&gt;
    super(true);&lt;br&gt;
  }&lt;br&gt;
  @IsDefined()&lt;br&gt;
  @IsNotEmpty()&lt;br&gt;
  @IsString()&lt;br&gt;
  @MaxLength(20, { message: "FundsRequired is too long" })&lt;br&gt;
  @ApiModelProperty()&lt;br&gt;
  readonly &lt;strong&gt;fundsRequired: string;&lt;/strong&gt;&lt;br&gt;
}&lt;/p&gt;

&lt;p&gt;&lt;em&gt;src\sme-project\objects\sme-project.schema.ts&lt;/em&gt;&lt;br&gt;
export class SmeProject extends Entity {&lt;br&gt;
    &lt;strong&gt;formattedId: number;&lt;/strong&gt;&lt;br&gt;
}&lt;/p&gt;

&lt;p&gt;export interface ISmeProject extends SmeProject, IEntity {&lt;br&gt;
  id: string;&lt;br&gt;
}&lt;/p&gt;

&lt;p&gt;Only the fields in the Schema are stored in the database. Here, the "formattedId" field is stored in the database.&lt;/p&gt;

&lt;p&gt;&lt;strong&gt;Code:&lt;/strong&gt;&lt;br&gt;
&lt;em&gt;src\sme-project\objects\sme-project.schema.ts&lt;/em&gt;&lt;br&gt;
import { Entity, IEntity, createModel } from "../../common/base/base.model";&lt;br&gt;
import { Schema } from "mongoose";&lt;br&gt;
import { pad } from "../sme-project.service";&lt;/p&gt;

&lt;p&gt;export const &lt;strong&gt;SmeProjectSchema&lt;/strong&gt;: Schema = createModel("SmeProjects", {&lt;br&gt;
     &lt;strong&gt;formattedId: { type: Number, required: true}&lt;/strong&gt;&lt;br&gt;
});&lt;/p&gt;

&lt;p&gt;SmeProjectSchema.virtual("_formattedId").get(function() {&lt;br&gt;
  return &lt;strong&gt;this._formattedId&lt;/strong&gt; = "PR" + &lt;strong&gt;pad(this.formattedId, 4, 0)&lt;/strong&gt;;&lt;br&gt;
  }&lt;br&gt;
);&lt;/p&gt;

&lt;p&gt;In the service class, we create another field "formattedId" which is equal to "total number of records + 1", since a new record is being created.&lt;br&gt;
&lt;em&gt;src/sme-project/sme-project.service.ts&lt;/em&gt;&lt;br&gt;
&lt;strong&gt;Code:&lt;/strong&gt;&lt;br&gt;
import { Injectable } from "@nestjs/common";&lt;br&gt;
import { Model } from "mongoose";&lt;br&gt;
import { BaseService } from "../common/base/base.service";&lt;br&gt;
import { ISmeProject } from "./objects/sme-project.schema";&lt;br&gt;
import { InjectModel } from "@nestjs/mongoose";&lt;br&gt;
import { CanLoadVirtual } from "../common/interfaces/can.load.virtual";&lt;/p&gt;

&lt;p&gt;import { Paginate } from "src/common/interfaces/pagination";&lt;/p&gt;

&lt;p&gt;export function &lt;strong&gt;pad(n, width, z)&lt;/strong&gt; {&lt;br&gt;
  z = z || '0';&lt;br&gt;
  n = n + '';&lt;br&gt;
  return n.length &amp;gt;= width ? n : new Array(width - n.length + 1).join(z) + n;&lt;br&gt;
}&lt;/p&gt;

&lt;p&gt;@Injectable()&lt;br&gt;
export class SmeProjectService extends BaseService&lt;br&gt;
  implements CanLoadVirtual {&lt;br&gt;
  constructor(&lt;br&gt;
    @InjectModel("SmeProject")&lt;br&gt;
    private readonly smeProjectModel: Model&lt;br&gt;
  ) {&lt;br&gt;
    super(smeProjectModel);&lt;br&gt;
  }&lt;br&gt;
  async doPreCreate(createDto: Partial): Promise {&lt;br&gt;
    let options: Paginate = {limit: 30, page: 1, sort: "_id"};&lt;br&gt;
    let query = null;&lt;br&gt;
    let _projects = await this.findAll(query,options);&lt;br&gt;
    &lt;strong&gt;createDto["formattedId"] = _projects.total + 1;&lt;/strong&gt;&lt;br&gt;
  }&lt;/p&gt;

&lt;p&gt;}&lt;br&gt;
}&lt;br&gt;
My Github Profile for code:&lt;br&gt;
Please see the develop branch in my repo.&lt;br&gt;
[Link]&lt;a href="https://github.com/krishnakurtakoti/nestTwo" rel="noopener noreferrer"&gt;https://github.com/krishnakurtakoti/nestTwo&lt;/a&gt;&lt;/p&gt;

</description>
      <category>mongodb</category>
      <category>node</category>
      <category>typescript</category>
      <category>nestjs</category>
    </item>
  </channel>
</rss>
