Saturday, July 11, 2020
MANY to MANY mapping using Collections
Please find below a practiced collections scenario.
Between LIST and LIST:
package com.surya.spring;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class Mul {
    public static void main(String[] args) {
        List<Integer> orderIds = new ArrayList<>();
        List<String> items = new ArrayList<>();
        orderIds.add(1);
        orderIds.add(2);
        orderIds.add(3);
        items.add("banana");
        items.add("orange");
        display1(orderIds, items);
    }

    public static void display1(List<Integer> orderIds, List<String> items) {
        /* Iterator approach: mapping from List to List */
        System.out.println("Using Iterator approach between mapping from List to List");
        Iterator<Integer> itr = orderIds.iterator();
        while (itr.hasNext()) {
            int next = itr.next();
            System.out.println("order id--->" + next + " items---->" + items + " Size--->" + items.size());
        }
        System.out.println();
        /* for-each loop approach: mapping from List to List */
        System.out.println("using for loop approach between mapping from List to List");
        for (int orderId : orderIds) {
            System.out.println("order id--->" + orderId + " items---->" + items + " Size--->" + items.size());
        }
    }
}
Output:
Using Iterator approach between mapping from List to List
order id--->1 items---->[banana, orange] Size--->2
order id--->2 items---->[banana, orange] Size--->2
order id--->3 items---->[banana, orange] Size--->2

using for loop approach between mapping from List to List
order id--->1 items---->[banana, orange] Size--->2
order id--->2 items---->[banana, orange] Size--->2
order id--->3 items---->[banana, orange] Size--->2
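If the goal is to pair each order id with one item by position rather than printing the whole list every time, an index-based loop over the shorter list works; a small sketch using the same variables:

// Pair elements positionally, stopping at the shorter list (3 ids vs 2 items here).
int n = Math.min(orderIds.size(), items.size());
for (int i = 0; i < n; i++) {
    System.out.println("order id--->" + orderIds.get(i) + " item---->" + items.get(i));
}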
Between LIST and MAP:
package com.surya.spring;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

public class Mulm {
    public static void main(String[] args) {
        List<Integer> orderIds = new ArrayList<>();
        List<String> itm1 = Arrays.asList("banana", "Orange", "Apple", "grapes");
        List<String> itm2 = Arrays.asList("tea", "Coffee");
        List<String> itm3 = Arrays.asList("chicken", "mutton", "Fish");
        Map<Integer, List<String>> m = new HashMap<>();
        orderIds.add(1);
        orderIds.add(2);
        orderIds.add(3);
        m.put(1, itm1);
        m.put(2, itm2);
        m.put(3, itm3);
        display(orderIds, m);
    }

    public static void display(List<Integer> orderIds, Map<Integer, List<String>> m) {
        /* Iterator approach: mapping from List to Map */
        System.out.println("Using Iterator approach between mapping from List to Map");
        Iterator<Integer> itr = orderIds.iterator();
        while (itr.hasNext()) {
            int next = itr.next();
            Iterator<Integer> it = m.keySet().iterator();
            while (it.hasNext()) {
                int key = it.next();            // unboxed, so == below compares ints safely
                if (next == key) {
                    System.out.println("order id---> " + next + " items---->" + m.get(key) + " Size--->" + m.get(key).size());
                }
            }
        }
        System.out.println();
        /* for-each loop approach: mapping from List to Map */
        System.out.println("using for loop approach between mapping from List to Map");
        for (int orderId : orderIds) {
            for (Map.Entry<Integer, List<String>> e : m.entrySet()) {
                if (orderId == e.getKey()) {
                    System.out.println("Key : " + e.getKey() + " Value : " + e.getValue() + " Size--->" + e.getValue().size());
                }
            }
        }
    }
}
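One usage note: the nested scan above checks every map key for every order id (O(n×m)); since the order id is itself the map key, a direct lookup does the same job. A minimal alternative for the loop body, reusing the same names:

// Direct lookup instead of scanning the whole keySet per order id.
for (int orderId : orderIds) {
    List<String> items = m.get(orderId);   // null when the id has no entry
    if (items != null) {
        System.out.println("order id---> " + orderId + " items---->" + items + " Size--->" + items.size());
    }
}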
Output:
Using Iterator approach between mapping from List to Map
order id---> 1 items---->[banana, Orange, Apple, grapes] Size--->4
order id---> 2 items---->[tea, Coffee] Size--->2
order id---> 3 items---->[chicken, mutton, Fish] Size--->3

using for loop approach between mapping from List to Map
Key : 1 Value : [banana, Orange, Apple, grapes] Size--->4
Key : 2 Value : [tea, Coffee] Size--->2
Key : 3 Value : [chicken, mutton, Fish] Size--->3
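Both programs map in one direction only (order id -> items). A true many-to-many mapping keeps two synchronized maps, one per direction; a sketch of that idea (class and method names are illustrative, not from the post):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Many-to-many: each order maps to several items and each item maps back to
// several orders, modeled as two Map indexes kept in sync by link().
public class ManyToMany {
    final Map<Integer, List<String>> itemsByOrder = new HashMap<>();
    final Map<String, List<Integer>> ordersByItem = new HashMap<>();

    void link(int orderId, String item) {
        itemsByOrder.computeIfAbsent(orderId, k -> new ArrayList<>()).add(item);
        ordersByItem.computeIfAbsent(item, k -> new ArrayList<>()).add(orderId);
    }

    public static void main(String[] args) {
        ManyToMany m = new ManyToMany();
        m.link(1, "banana");
        m.link(1, "orange");
        m.link(2, "banana");   // banana now belongs to orders 1 and 2
        System.out.println("order 1 ---> " + m.itemsByOrder.get(1));        // [banana, orange]
        System.out.println("banana ---> " + m.ordersByItem.get("banana"));  // [1, 2]
    }
}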
Tuesday, July 7, 2020
Hadoop and Cloud Analytics
Cloud Analytics (Hadoop + Cloud) Course 5th Session
1. Cloud Analytics - the future of Hadoop and how the cloud is affecting the Hadoop space
2. Hadoop 1 architecture
3. Hadoop components
Components - master nodes, worker nodes
HDFS - NameNode, Secondary NameNode (SNN), DataNodes (DN)
Processing - JobTracker (master), TaskTracker (workers)
4. Hadoop 1 limitations
1. SPOF (single point of failure) of the NameNode
2. Heavily loaded JobTracker
Hadoop 2
SPOF overcome by High Availability / HDFS Federation
Heavily loaded JobTracker overcome by YARN (Yet Another Resource Negotiator):
JobTracker --> ResourceManager ---> Applications Manager + Scheduler
TaskTracker ---> NodeManager ----> Containers + Application Master
Processing on Hadoop 1 is called MapReduce 1.
Processing on Hadoop 2 (YARN) is called MapReduce 2.
Hadoop 2 = HA + YARN
Hadoop 1 - SPOF NameNode
Hadoop 2 - HA + checkpoint server (SNN) + JournalNodes, coordinated by ZooKeeper
Hadoop 1 - you can run only one job at a time.
Hadoop 2 - you can run multiple jobs at a time.
----------------------------------------------------------------------
The FIFO Scheduler - First In, First Out --> the default in Hadoop 1: Job1 ---> Job2
CapacityScheduler - capacities configured in an XML file: e.g. Job1 (1 hr) gets 75%, Job2 (30 min) gets 25%, and both run at the same time
FairScheduler - resources are shared fairly across the running jobs
-------------------------------------
Open source vs distribution:
Hadoop open source - enterprises do not use it as-is.
Hadoop distributions - Cloudera + Hortonworks (now merged), MapR
Local filesystem commands:
ls                  # list files
ls -lrt             # long listing sorted by modification time, oldest first
mkdir test          # create a directory
ls -ltr test        # long listing of the new directory
cd test             # change into it
cat >> a.txt        # append typed input to a.txt (type a few lines such as "ss", "s", then Ctrl+D)
pwd                 # print the working directory
cat a.txt           # print the file contents
cp                  # copy files
mv                  # move/rename files
rm                  # remove files
HDFS commands:
hadoop fs -put inputfile.txt test.txt        # copy a local file into HDFS as test.txt
hadoop fs -ls /user/cloudera                 # list an HDFS directory
hadoop fs -cat /user/cloudera/test.txt       # print an HDFS file
hadoop fs -mkdir /user/cloudera/todayclass   # create an HDFS directory
hadoop fs -ls /user/cloudera
hadoop fs -copyFromLocal sample.txt /user/cloudera/todayclass/simple.txt   # same effect as -put
hadoop fs -get /user/cloudera/todayclass/simple.txt bada.txt               # copy from HDFS to local
hadoop fs -Ddfs.replication=4 -cp /user/cloudera/todayclass/simple.txt /user/cloudera/sample.txt   # copy within HDFS with replication factor 4
sudo -u hdfs hdfs fsck /user/cloudera/todayclass/ -files -blocks -locations   # report files, blocks and their locations
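The same operations are available from Java through the Hadoop FileSystem API; a sketch reusing the paths above (assumes the cluster configuration files are on the classpath):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// HDFS client sketch mirroring the shell commands above.
public class HdfsOps {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();   // reads core-site.xml / hdfs-site.xml
        FileSystem fs = FileSystem.get(conf);

        fs.mkdirs(new Path("/user/cloudera/todayclass"));                   // fs -mkdir
        fs.copyFromLocalFile(new Path("sample.txt"),
                new Path("/user/cloudera/todayclass/simple.txt"));          // fs -copyFromLocal
        fs.copyToLocalFile(new Path("/user/cloudera/todayclass/simple.txt"),
                new Path("bada.txt"));                                      // fs -get

        for (FileStatus st : fs.listStatus(new Path("/user/cloudera"))) {   // fs -ls
            System.out.println(st.getPath() + "  replication=" + st.getReplication());
        }
        fs.close();
    }
}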
DataSecOps / Data Engineer - the job market:
Single skill - Oracle DBA
Broader skill - Oracle DBA + NoSQL: Cassandra, HBase, MongoDB, AWS RDS
----------------------
Supply and demand:
less supply - more demand (better pay)
more supply - less demand
----------------------------
Recap:
Hadoop 1
Hadoop components
Hadoop 1 limitations
Hadoop 2 HA
Hadoop 2 heavily loaded JobTracker -> YARN
Hadoop commands
Fault tolerance
MapReduce:
----------------------------
the processing logic in Hadoop
each block is processed as key-value pairs
A data set is split into data blocks; each block is 64 MB by default (Hadoop 1).
Block-size trade-off for a 900 MB data set:
- 6 MB blocks --> 150 blocks --> more blocks means more NameNode metadata and more network bandwidth
- 64 MB blocks (the default) --> 15 blocks
- 600 MB blocks --> 2 blocks (one 600 MB block + one 300 MB block)
The block size is set in hdfs-site.xml via dfs.block.size, in bytes (67108864 = 64 MB, 134217728 = 128 MB).
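The block counts above are just ceiling division; a quick check in plain Java, with the numbers from the notes:

// Block-count arithmetic: blocks = ceil(dataSize / blockSize).
public class BlockMath {
    public static void main(String[] args) {
        long dataMb = 900;
        for (long blockMb : new long[] {6, 64, 600}) {
            long blocks = (dataMb + blockMb - 1) / blockMb;   // ceiling division
            System.out.println(blockMb + " MB blocks -> " + blocks + " blocks");
        }
    }
}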
MapReduce phases:
1. Mapper (key: id, value: number) - user defined
2. Sort and shuffle (grouping)
3. Reduce and compute
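A minimal sketch of these three phases is the classic Hadoop word count (class names and paths here are illustrative, not from the session): a user-defined Mapper emits key/value pairs, the framework sorts and shuffles them by key, and a Reducer computes per-key results.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCount {

    // Map phase: for each input line, emit (word, 1).
    public static class TokenMapper extends Mapper<Object, Text, Text, IntWritable> {
        private static final IntWritable ONE = new IntWritable(1);
        private final Text word = new Text();

        @Override
        protected void map(Object key, Text value, Context ctx) throws IOException, InterruptedException {
            for (String token : value.toString().split("\\s+")) {
                if (!token.isEmpty()) {
                    word.set(token);
                    ctx.write(word, ONE);
                }
            }
        }
    }

    // Reduce phase: after sort/shuffle groups values by word, sum the counts.
    public static class SumReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context ctx) throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable v : values) sum += v.get();
            ctx.write(key, new IntWritable(sum));
        }
    }

    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "word count");
        job.setJarByClass(WordCount.class);
        job.setMapperClass(TokenMapper.class);
        job.setReducerClass(SumReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));     // HDFS input dir
        FileOutputFormat.setOutputPath(job, new Path(args[1]));   // HDFS output dir
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}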
Spring Learning
Spring DI (Dependency Injection)
Spring IoC (Inversion of Control)
Spring AOP - Aspect-Oriented Programming
Spring Autowiring
Spring JDBC
Spring ORM -> Object-Relational Mapping
Spring MVC
Spring Boot
Properties:
Spring Singleton:
Spring Prototype:
Spring Inheritance (parent):
Spring Lazy Init (constructor):
DispatcherServlet (web.xml) - the front controller
---HandlerMapping (Spring config file)
---Controller extends AbstractController (returns ModelAndView)
---Internal view resolver (InternalResourceViewResolver)
---index.jsp
---Request mappings (2)
---view (JSP)
Config.xml:
<!DOCTYPE beans PUBLIC "-//SPRING//DTD BEAN 2.0//EN"
"http://www.springframework.org/dtd/spring-beans-2.0.dtd">
constructor-arg
ref -> refers to another POJO class
The ref attribute is used for dependency injection --> the alternative is autowiring (the dependency is wired automatically).
Autowiring (implicit dependency injection):
byName
byType
by constructor
autodetect
Aspect-Oriented Programming (AOP) advice types:
before, after, around, throws
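A minimal sketch of constructor injection with ref, using hypothetical OrderDao/OrderService beans (names are illustrative, not from these notes); the matching bean definitions are shown as comments:

// Hypothetical beans for illustration; the XML below would live in config.xml.
//
// <bean id="orderDao" class="com.surya.spring.OrderDao"/>
// <bean id="orderService" class="com.surya.spring.OrderService">
//     <constructor-arg ref="orderDao"/>   <!-- ref points at another POJO bean -->
// </bean>
// <!-- With autowiring instead: <bean id="orderService" class="..." autowire="constructor"/> -->

package com.surya.spring;

import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;

class OrderDao {
    String findItems(int orderId) { return "items for order " + orderId; }
}

class OrderService {
    private final OrderDao dao;
    OrderService(OrderDao dao) { this.dao = dao; }   // injected via constructor-arg
    String describe(int id) { return dao.findItems(id); }
}

public class DiDemo {
    public static void main(String[] args) {
        ApplicationContext ctx = new ClassPathXmlApplicationContext("config.xml");
        OrderService svc = ctx.getBean("orderService", OrderService.class);
        System.out.println(svc.describe(1));
    }
}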
Monday, July 6, 2020
AWS - What is cloud
What is cloud:
Example: you need 100 Windows servers on-premises -
licenses - 10k
2 days / 2 months of lead time
you must arrange the time, the storage, and the licenses yourself
Whatever steps we follow to create a service on-premises, the same steps apply in the cloud.
All organisations are moving towards the cloud:
No initial investment
Use-and-pay model
No extra hassles
Amazon WorkSpaces
Creating an EC2 instance:
1) Create the EC2 instance in the AWS console
2) PuTTYgen --> load the private key file --> save it as a .ppk file
3) Open PuTTY --> set up your SSH session --> add the .ppk file
4) Open your IP address through PuTTY
5) sudo su - or sudo su root
6) apt-get update
7) apt install apache2
8) service apache2 start
9) service apache2 status
10) cd /var/www/html
11) vi index.html --> change something
12) Put your IP address in the browser; you can see your changes
AWS console ---> aws.amazon.com
Free Tier - 12 months of free access
New user - create the account
Existing user - needs a new mail id, a new debit/credit card, and a new mobile number
Support plans: Basic (free), Business, Enterprise
Phone number verification (a mobile number is suggested)
Payment (credit/debit card) verification
Region --->
Availability Zones (min 2 to max 6 zones per region)
Latency -> the time taken to initiate the process (2s vs 4s)
Charges
-------
Time
Instance type (EC2 ranges from 1 core/1 GB up to 256 cores/1920 GB RAM)
Network - a 2 GB video served to 100 people vs 100,000 people
AWS Monthly Calculator
AWS Pricing Calculator
Purchasing options
-------------------
On-Demand (the default)
Reserved --> for steady company usage
Spot Instances
Region - which region?
-----------------------
100 machines - purchased On-Demand by default
- Reserved - a 1-year/3-year term paid in advance (e.g. $2 instead of the on-demand rate)
USA company --> end users in Europe --> team in Chennai: pick the region closest to the users
Instance types
General Purpose
Compute Optimized
GPU Instances
Memory Optimized
T2/T3 Unlimited - 1 vCPU, 1 GB RAM
Real time - tag your instances
-------------------------
Security group -> inbound/outbound access rules
49.207.141.49/32 - a single machine (1 address)
49.207.14.20/24 - a /24 network (256 addresses)
49.207.14.20/16 - a /16 network (65,536 addresses)
49.207.14.20/8 - a /8 network (16,777,216 addresses)
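The address counts follow from 2^(32 - prefix length); a quick check in Java:

// Number of IPv4 addresses covered by a CIDR prefix: 2^(32 - prefixLength).
public class CidrMath {
    public static void main(String[] args) {
        for (int prefix : new int[] {32, 24, 16, 8}) {
            long addresses = 1L << (32 - prefix);
            System.out.println("/" + prefix + " -> " + addresses + " addresses");
        }
    }
}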
yum update -y             # update packages (Amazon Linux)
yum install httpd         # install Apache
systemctl start httpd     # start Apache now
systemctl enable httpd    # start Apache on every boot
cd /var/www/html
vi index.html             # edit the default page
Metadata: http://169.254.169.254/latest/meta-data
Public and private IPs are dynamic by default.
Per account: 5 static (Elastic) IPs per region.
User data (runs as root on first boot):
#!/bin/bash
yum update -y
yum install -y httpd
systemctl start httpd
systemctl enable httpd
echo "hello" > /var/www/html/index.html
Template: a launch template can capture the AMI, instance type, security group, and user data above for reuse.