0% found this document useful (0 votes)
8 views11 pages

MYSQL

The document contains data about bank customers including their age, job, marital status, education, default history, balance, housing status, loans, and other attributes. It defines a table to store this customer data and inserts multiple rows of sample customer records into the table.

Uploaded by

gkundan1996
Copyright
© © All Rights Reserved
We take content rights seriously. If you suspect this is your content, claim it here.
Available Formats
Download as TXT, PDF, TXT or read online on Scribd
0% found this document useful (0 votes)
8 views11 pages

MYSQL

The document contains data about bank customers including their age, job, marital status, education, default history, balance, housing status, loans, and other attributes. It defines a table to store this customer data and inserts multiple rows of sample customer records into the table.

Uploaded by

gkundan1996
Copyright
© © All Rights Reserved
We take content rights seriously. If you suspect this is your content, claim it here.
Available Formats
Download as TXT, PDF, TXT or read online on Scribd
You are on page 1/ 11

-- List the existing databases, create the working database if it is not
-- already there, and make it the default schema for the rest of the script.
show databases;

create database if not exists ineuron_fsda;

use ineuron_fsda;

-- Bank marketing dataset table. `default`, `day` and `month` are reserved
-- words in MySQL, hence the backtick quoting.
create table if not exists bank_details(
    age int,
    job varchar(30),
    marital varchar(30),
    education varchar(30),
    `default` varchar(30),     -- has credit in default? ("yes"/"no")
    balance int,               -- account balance (units unspecified -- TODO confirm)
    housing varchar(30),       -- has a housing loan? ("yes"/"no")
    loan varchar(30),          -- has a personal loan? ("yes"/"no")
    contact varchar(30),
    `day` int,                 -- day-of-month of contact, per the sample data
    `month` varchar(30),
    duration int,
    campaign int,
    pdays int,                 -- -1 appears throughout the sample rows
    previous int,
    poutcome varchar(30),
    y varchar(30)              -- outcome label ("yes"/"no")
);

-- Sample rows for bank_details.
-- Bug fix: the document export hard-wrapped rows mid-line; a "-" or "," at a
-- line break is harmless whitespace to the SQL parser, but string literals
-- that were split across lines (e.g. "blue-<newline>collar") would have
-- stored corrupted values. Each row is rejoined onto a single line and the
-- statement is terminated with a semicolon.
insert into bank_details values
(58,"management","married","tertiary","no",2143,"yes","no","unknown",5,"may",261,1,-1,0,"unknown","no"),
(44,"technician","single","secondary","no",29,"yes","no","unknown",5,"may",151,1,-1,0,"unknown","no"),
(33,"entrepreneur","married","secondary","no",2,"yes","yes","unknown",5,"may",76,1,-1,0,"unknown","no"),
(47,"blue-collar","married","unknown","no",1506,"yes","no","unknown",5,"may",92,1,-1,0,"unknown","no"),
(33,"unknown","single","unknown","no",1,"no","no","unknown",5,"may",198,1,-1,0,"unknown","no"),
(35,"management","married","tertiary","no",231,"yes","no","unknown",5,"may",139,1,-1,0,"unknown","no"),
(28,"management","single","tertiary","no",447,"yes","yes","unknown",5,"may",217,1,-1,0,"unknown","no"),
(42,"entrepreneur","divorced","tertiary","yes",2,"yes","no","unknown",5,"may",380,1,-1,0,"unknown","no"),
(58,"retired","married","primary","no",121,"yes","no","unknown",5,"may",50,1,-1,0,"unknown","no"),
(43,"technician","single","secondary","no",593,"yes","no","unknown",5,"may",55,1,-1,0,"unknown","no"),
(41,"admin.","divorced","secondary","no",270,"yes","no","unknown",5,"may",222,1,-1,0,"unknown","no"),
(29,"admin.","single","secondary","no",390,"yes","no","unknown",5,"may",137,1,-1,0,"unknown","no"),
(53,"technician","married","secondary","no",6,"yes","no","unknown",5,"may",517,1,-1,0,"unknown","no"),
(58,"technician","married","unknown","no",71,"yes","no","unknown",5,"may",71,1,-1,0,"unknown","no"),
(57,"services","married","secondary","no",162,"yes","no","unknown",5,"may",174,1,-1,0,"unknown","no"),
(51,"retired","married","primary","no",229,"yes","no","unknown",5,"may",353,1,-1,0,"unknown","no"),
(45,"admin.","single","unknown","no",13,"yes","no","unknown",5,"may",98,1,-1,0,"unknown","no"),
(57,"blue-collar","married","primary","no",52,"yes","no","unknown",5,"may",38,1,-1,0,"unknown","no"),
(60,"retired","married","primary","no",60,"yes","no","unknown",5,"may",219,1,-1,0,"unknown","no"),
(33,"services","married","secondary","no",0,"yes","no","unknown",5,"may",54,1,-1,0,"unknown","no"),
(28,"blue-collar","married","secondary","no",723,"yes","yes","unknown",5,"may",262,1,-1,0,"unknown","no"),
(56,"management","married","tertiary","no",779,"yes","no","unknown",5,"may",164,1,-1,0,"unknown","no"),
(32,"blue-collar","single","primary","no",23,"yes","yes","unknown",5,"may",160,1,-1,0,"unknown","no"),
(25,"services","married","secondary","no",50,"yes","no","unknown",5,"may",342,1,-1,0,"unknown","no"),
(40,"retired","married","primary","no",0,"yes","yes","unknown",5,"may",181,1,-1,0,"unknown","no"),
(44,"admin.","married","secondary","no",-372,"yes","no","unknown",5,"may",172,1,-1,0,"unknown","no"),
(39,"management","single","tertiary","no",255,"yes","no","unknown",5,"may",296,1,-1,0,"unknown","no"),
(52,"entrepreneur","married","secondary","no",113,"yes","yes","unknown",5,"may",127,1,-1,0,"unknown","no"),
(46,"management","single","secondary","no",-246,"yes","no","unknown",5,"may",255,2,-1,0,"unknown","no"),
(36,"technician","single","secondary","no",265,"yes","yes","unknown",5,"may",348,1,-1,0,"unknown","no"),
(57,"technician","married","secondary","no",839,"no","yes","unknown",5,"may",225,1,-1,0,"unknown","no"),
(49,"management","married","tertiary","no",378,"yes","no","unknown",5,"may",230,1,-1,0,"unknown","no"),
(60,"admin.","married","secondary","no",39,"yes","yes","unknown",5,"may",208,1,-1,0,"unknown","no"),
(59,"blue-collar","married","secondary","no",0,"yes","no","unknown",5,"may",226,1,-1,0,"unknown","no"),
(51,"management","married","tertiary","no",10635,"yes","no","unknown",5,"may",336,1,-1,0,"unknown","no"),
(57,"technician","divorced","secondary","no",63,"yes","no","unknown",5,"may",242,1,-1,0,"unknown","no"),
(25,"blue-collar","married","secondary","no",-7,"yes","no","unknown",5,"may",365,1,-1,0,"unknown","no"),
(53,"technician","married","secondary","no",-3,"no","no","unknown",5,"may",1666,1,-1,0,"unknown","no"),
(36,"admin.","divorced","secondary","no",506,"yes","no","unknown",5,"may",577,1,-1,0,"unknown","no"),
(37,"admin.","single","secondary","no",0,"yes","no","unknown",5,"may",137,1,-1,0,"unknown","no"),
(44,"services","divorced","secondary","no",2586,"yes","no","unknown",5,"may",160,1,-1,0,"unknown","no"),
(50,"management","married","secondary","no",49,"yes","no","unknown",5,"may",180,2,-1,0,"unknown","no"),
(60,"blue-collar","married","unknown","no",104,"yes","no","unknown",5,"may",22,1,-1,0,"unknown","no"),
(54,"retired","married","secondary","no",529,"yes","no","unknown",5,"may",1492,1,-1,0,"unknown","no"),
(58,"retired","married","unknown","no",96,"yes","no","unknown",5,"may",616,1,-1,0,"unknown","no"),
(36,"admin.","single","primary","no",-171,"yes","no","unknown",5,"may",242,1,-1,0,"unknown","no"),
(58,"self-employed","married","tertiary","no",-364,"yes","no","unknown",5,"may",355,1,-1,0,"unknown","no"),
(44,"technician","married","secondary","no",0,"yes","no","unknown",5,"may",225,2,-1,0,"unknown","no"),
(55,"technician","divorced","secondary","no",0,"no","no","unknown",5,"may",160,1,-1,0,"unknown","no"),
(29,"management","single","tertiary","no",0,"yes","no","unknown",5,"may",363,1,-1,0,"unknown","no"),
(54,"blue-collar","married","secondary","no",1291,"yes","no","unknown",5,"may",266,1,-1,0,"unknown","no"),
(48,"management","divorced","tertiary","no",-244,"yes","no","unknown",5,"may",253,1,-1,0,"unknown","no"),
(32,"management","married","tertiary","no",0,"yes","no","unknown",5,"may",179,1,-1,0,"unknown","no"),
(42,"admin.","single","secondary","no",-76,"yes","no","unknown",5,"may",787,1,-1,0,"unknown","no"),
(24,"technician","single","secondary","no",-103,"yes","yes","unknown",5,"may",145,1,-1,0,"unknown","no"),
(38,"entrepreneur","single","tertiary","no",243,"no","yes","unknown",5,"may",174,1,-1,0,"unknown","no"),
(38,"management","single","tertiary","no",424,"yes","no","unknown",5,"may",104,1,-1,0,"unknown","no"),
(47,"blue-collar","married","unknown","no",306,"yes","no","unknown",5,"may",13,1,-1,0,"unknown","no"),
(40,"blue-collar","single","unknown","no",24,"yes","no","unknown",5,"may",185,1,-1,0,"unknown","no"),
(46,"services","married","primary","no",179,"yes","no","unknown",5,"may",1778,1,-1,0,"unknown","no"),
(32,"admin.","married","tertiary","no",0,"yes","no","unknown",5,"may",138,1,-1,0,"unknown","no"),
(53,"technician","divorced","secondary","no",989,"yes","no","unknown",5,"may",812,1,-1,0,"unknown","no"),
(57,"blue-collar","married","primary","no",249,"yes","no","unknown",5,"may",164,1,-1,0,"unknown","no"),
(33,"services","married","secondary","no",790,"yes","no","unknown",5,"may",391,1,-1,0,"unknown","no"),
(49,"blue-collar","married","unknown","no",154,"yes","no","unknown",5,"may",357,1,-1,0,"unknown","no"),
(51,"management","married","tertiary","no",6530,"yes","no","unknown",5,"may",91,1,-1,0,"unknown","no"),
(60,"retired","married","tertiary","no",100,"no","no","unknown",5,"may",528,1,-1,0,"unknown","no"),
(59,"management","divorced","tertiary","no",59,"yes","no","unknown",5,"may",273,1,-1,0,"unknown","no"),
(55,"technician","married","secondary","no",1205,"yes","no","unknown",5,"may",158,2,-1,0,"unknown","no"),
(35,"blue-collar","single","secondary","no",12223,"yes","yes","unknown",5,"may",177,1,-1,0,"unknown","no"),
(57,"blue-collar","married","secondary","no",5935,"yes","yes","unknown",5,"may",258,1,-1,0,"unknown","no"),
(31,"services","married","secondary","no",25,"yes","yes","unknown",5,"may",172,1,-1,0,"unknown","no"),
(54,"management","married","secondary","no",282,"yes","yes","unknown",5,"may",154,1,-1,0,"unknown","no"),
(55,"blue-collar","married","primary","no",23,"yes","no","unknown",5,"may",291,1,-1,0,"unknown","no"),
(43,"technician","married","secondary","no",1937,"yes","no","unknown",5,"may",181,1,-1,0,"unknown","no"),
(53,"technician","married","secondary","no",384,"yes","no","unknown",5,"may",176,1,-1,0,"unknown","no"),
(44,"blue-collar","married","secondary","no",582,"no","yes","unknown",5,"may",211,1,-1,0,"unknown","no"),
(55,"services","divorced","secondary","no",91,"no","no","unknown",5,"may",349,1,-1,0,"unknown","no"),
(49,"services","divorced","secondary","no",0,"yes","yes","unknown",5,"may",272,1,-1,0,"unknown","no"),
(55,"services","divorced","secondary","yes",1,"yes","no","unknown",5,"may",208,1,-1,0,"unknown","no"),
(45,"admin.","single","secondary","no",206,"yes","no","unknown",5,"may",193,1,-1,0,"unknown","no"),
(47,"services","divorced","secondary","no",164,"no","no","unknown",5,"may",212,1,-1,0,"unknown","no"),
(42,"technician","single","secondary","no",690,"yes","no","unknown",5,"may",20,1,-1,0,"unknown","no"),
(59,"admin.","married","secondary","no",2343,"yes","no","unknown",5,"may",1042,1,-1,0,"unknown","yes"),
(46,"self-employed","married","tertiary","no",137,"yes","yes","unknown",5,"may",246,1,-1,0,"unknown","no"),
(51,"blue-collar","married","primary","no",173,"yes","no","unknown",5,"may",529,2,-1,0,"unknown","no"),
(56,"admin.","married","secondary","no",45,"no","no","unknown",5,"may",1467,1,-1,0,"unknown","yes"),
(41,"technician","married","secondary","no",1270,"yes","no","unknown",5,"may",1389,1,-1,0,"unknown","yes"),
(46,"management","divorced","secondary","no",16,"yes","yes","unknown",5,"may",188,2,-1,0,"unknown","no"),
(57,"retired","married","secondary","no",486,"yes","no","unknown",5,"may",180,2,-1,0,"unknown","no"),
(42,"management","single","secondary","no",50,"no","no","unknown",5,"may",48,1,-1,0,"unknown","no"),
(30,"technician","married","secondary","no",152,"yes","yes","unknown",5,"may",213,2,-1,0,"unknown","no"),
(60,"admin.","married","secondary","no",290,"yes","no","unknown",5,"may",583,1,-1,0,"unknown","no");

-- Basic exploration queries over bank_details.
select count(*) from bank_details;

select * from bank_details;

select age, loan, job from bank_details;

-- `default` is a reserved word, so it must stay backtick-quoted.
select `default` from bank_details;

select * from bank_details limit 10;

select * from bank_details where age = 33;

select * from bank_details where age = 60;

select * from bank_details where age = 60 and job = 'retired';

select * from bank_details where education = 'unknown' or marital = 'single';

-- The parentheses force the OR to be evaluated before the AND.
select * from bank_details
where (education = 'unknown' or marital = 'single')
  and balance < 500;

select distinct job from bank_details;

select * from bank_details;

select * from bank_details order by age;

select * from bank_details order by age desc;

-- --------------------------------- PROBLEMS ---------------------------------

-- try to find out the sum of balance
-- try to find out the average of balance
-- try to find out who has the minimum balance
-- try to find out who has the maximum balance
-- try to prepare a list of all the persons who have a loan
-- try to find out the average balance for all the people whose job role is admin
-- try to find out the records without a job whose age is below 45
-- try to find out the records where education is primary and the person is jobless
-- try to find out the records whose bank account has a negative balance
-- try to find out the records of people not having a house at all, along with their balance

-- --------------------------------- SOLUTIONS --------------------------------

-- Solutions to the exercise list above.
select sum(balance) from bank_details;

select avg(balance) from bank_details;

select min(balance) from bank_details;

-- Four equivalent ways to find who holds the minimum balance:
select * from bank_details order by balance;
select * from bank_details order by balance limit 1;
select * from bank_details where balance in (select min(balance) from bank_details);
select * from bank_details where balance = (select min(balance) from bank_details);

select max(balance) from bank_details;

-- And the same four variants for the maximum balance:
select * from bank_details order by balance desc;
select * from bank_details order by balance desc limit 1;
select * from bank_details where balance in (select max(balance) from bank_details);
select * from bank_details where balance = (select max(balance) from bank_details);

select count(*) from bank_details where loan = 'yes';

select * from bank_details where loan = 'yes';
select * from bank_details where job = 'admin.';
select avg(balance) from bank_details where job = 'admin.';
select * from bank_details where job = 'unknown' and age < 45;
select * from bank_details where education = 'primary' and job = 'unknown';
select * from bank_details where balance < 0;
select balance, housing from bank_details;
select balance, housing from bank_details where housing = 'no';

-- -----------------------------------------------------------------------------

-- Stored procedure returning every row of bank_details.
DELIMITER &&
create procedure sudh()
BEGIN
select * from bank_details;
END &&
-- Bug fix: restore the default delimiter, otherwise the following CALL
-- (terminated by a newline, not &&) is never sent to the server.
DELIMITER ;
call sudh();

-- Stored procedure: rows holding the maximum balance.
DELIMITER &&
create procedure bal_max()
BEGIN
select * from bank_details where balance in (select max(balance) from bank_details);
END &&
-- Bug fix: reset the delimiter so the CALL below actually executes.
DELIMITER ;
call bal_max();

-- Stored procedure: average balance for a given job role (IN parameter).
DELIMITER &&
create procedure avg_bal_jobrole1(IN sudh varchar(30))
BEGIN
select avg(balance) from bank_details where job = sudh;
END &&
-- Bug fix: reset the delimiter so the CALL statements below execute.
DELIMITER ;

call avg_bal_jobrole1('admin.');
call avg_bal_jobrole1('retired');
call avg_bal_jobrole1('unknown');

-- Stored procedure: filter by education and job, both passed as IN parameters.
DELIMITER &&
create procedure sel_edu_job1(in v1 varchar(30), in v2 varchar(30))
BEGIN
select * from bank_details where education = v1 and job = v2;
END &&
-- Bug fix: reset the delimiter so the CALL below executes.
DELIMITER ;
call sel_edu_job1('tertiary' , 'retired');

-- View exposing a column subset of bank_details; queries against the view
-- behave exactly like queries against the base table's columns.
create view bank_view as
select age, job, marital, balance, education from bank_details;

select avg(balance) from bank_view where job = 'admin.';

-- -----------------------------------------------------------------------------

create database dress_data;

use dress_data;

-- Dress attribute table; every field is loaded as raw text from the CSV.
-- NOTE(review): `waiseline` (sic) and `Pattern Type` (embedded space) appear
-- to mirror the CSV header spellings -- confirm before renaming, since
-- downstream queries may reference them.
create table if not exists dress(
    `Dress_ID` varchar(30),
    `Style` varchar(30),
    `Price` varchar(30),
    `Rating` varchar(30),
    `Size` varchar(30),
    `Season` varchar(30),
    `NeckLine` varchar(30),
    `SleeveLength` varchar(30),
    `waiseline` varchar(30),
    `Material` varchar(30),
    `FabricType` varchar(30),
    `Decoration` varchar(30),
    `Pattern Type` varchar(30),
    `Recommendation` varchar(30));

-- LOAD DATA INFILE only reads from the directory permitted by the server's
-- secure_file_priv variable (SHOW VARIABLES LIKE 'secure_file_priv').
LOAD DATA INFILE
'D:/AttributeDataSet.csv'
into table dress
FIELDS TERMINATED by ','
ENCLOSED by '"'
lines terminated by '\n'
IGNORE 1 ROWS;

select * from dress;

-- Demo table with an auto-increment primary key.
create table if not exists test (
    test_id int auto_increment,
    test_name varchar(30),
    test_mailid varchar(30),
    teast_adress varchar(30),   -- (sic) spelling kept; later tables reuse it
    primary key (test_id));

select * from test;

-- Explicit ids are allowed even on an auto_increment column.
insert into test values
(1,'sudhanshu','[email protected]','benglaore'),
(2,'krish','[email protected]', 'bengalore'),
(3,'hitesh' ,'[email protected]','bengalore'),
(4,'shubahm' , '[email protected]', 'jaipur');

select * from test;

-- Same shape as `test`, but here the ids are generated by auto_increment.
create table if not exists test2 (
    test_id int not null auto_increment,
    test_name varchar(30),
    test_mailid varchar(30),
    teast_adress varchar(30),
    primary key (test_id));

-- Omitting test_id in the column list lets MySQL assign 1, 2, 3, ...
insert into test2 (test_name , test_mailid , teast_adress) values
('sudhanshu','[email protected]','benglaore'),
('krish','[email protected]', 'bengalore'),
('hitesh' ,'[email protected]','bengalore'),
('shubahm' , '[email protected]', 'jaipur');

select * from test2;

-- CHECK constraint demo (CHECKs are enforced from MySQL 8.0.16 onwards).
create table if not exists test3 (
    test_id int,
    test_name varchar(30),
    test_mailid varchar(30),
    teast_adress varchar(30),
    test_salary int check(test_salary > 10000));

-- A CHECK can also be added after creation via ALTER TABLE.
alter table test3 add check (test_id > 0);

insert into test3 values (19,'sudhanshu','[email protected]','benglaore' , 50000);

insert into test3 values
(1,'sudhanshu','[email protected]','benglaore' , 50000),
(2,'krish','[email protected]', 'bengalore' , 30000),
(3,'hitesh' ,'[email protected]','bengalore' , 111000),
(4,'shubahm' , '[email protected]', 'jaipur',20000);

-- Column-level CHECK pinning the address to a single literal value.
create table if not exists test4 (
    test_id int,
    test_name varchar(30),
    test_mailid varchar(30),
    teast_adress varchar(30) check (teast_adress = 'bengalore'),
    test_salary int check(test_salary > 10000));

-- Satisfies both CHECKs ('bengalore', 50000 > 10000).
insert into test4 values (1,'sudhanshu','[email protected]','bengalore' , 50000);

-- NOT NULL demo: test_id has no default and is not auto_increment.
create table if not exists test5(
    test_id int NOT NULL,
    test_name varchar(30),
    test_mailid varchar(30),
    teast_adress varchar(30) check (teast_adress = 'bengalore'),
    test_salary int check(test_salary > 10000));

select * from test5;

-- NOTE(review): this insert omits the NOT NULL test_id column, so under
-- strict sql_mode it is rejected -- presumably shown on purpose as a demo.
insert into test5 ( test_name , test_mailid , teast_adress,test_salary) values
('sudhanshu','[email protected]','bengalore' , 50000);

-- Like test5, but test_id now has an explicit DEFAULT 0.
create table if not exists test6(
    test_id int NOT NULL default 0,
    test_name varchar(30),
    test_mailid varchar(30),
    teast_adress varchar(30) check (teast_adress = 'bengalore'),
    test_salary int check(test_salary > 10000));

-- test_id is omitted here, so it falls back to its default of 0.
insert into test6 ( test_name , test_mailid , teast_adress,test_salary) values
('sudhanshu','[email protected]','bengalore' , 50000);

select * from test6;

-- An explicit test_id overrides the default.
insert into test6 ( test_id,test_name , test_mailid , teast_adress,test_salary)
values (101 , 'sudhanshu1','[email protected]','bengalore' , 50000);

-- UNIQUE column demo on test_mailid.
create table if not exists test7(
    test_id int NOT NULL default 0,
    test_name varchar(30),
    test_mailid varchar(30) unique,
    teast_adress varchar(30) check (teast_adress = 'bengalore'),
    test_salary int check(test_salary > 10000));

insert into test7 ( test_name , test_mailid , teast_adress,test_salary) values
('sudhanshu','[email protected]','bengalore' , 50000);

-- Combining NOT NULL, DEFAULT, UNIQUE, CHECK and a primary key.
create table if not exists test8(
    test_id int NOT NULL auto_increment,
    test_name varchar(30) NOT NULL default 'unknown',
    test_mailid varchar(30) unique NOT NULL,
    teast_adress varchar(30) check (teast_adress = 'bengalore') NOT NULL,
    test_salary int check(test_salary > 10000) NOT NULL,
    primary key (test_id));

select * from test8;

insert into test8 ( test_id , test_name , test_mailid , teast_adress,test_salary)
values (101 , 'sudhanshu','[email protected]','bengalore' , 50000);

-- NOTE(review): this reuses the same mail id as the row above, so the UNIQUE
-- constraint on test_mailid rejects it -- presumably intentional as a demo.
insert into test8 ( test_name , test_mailid , teast_adress,test_salary) values
('sudhanshu','[email protected]','bengalore' , 50000);

CREATE TABLE `machines` (
  `id` int(11) NOT NULL,
  `hostname` varchar(255),
  `ip_address` varchar(15),
  `network` varchar(255) NOT NULL,
  -- Bug fix: the original CHECK was
  --   (network = 'INTERNAL' OR 'EXTERNAL' OR 'OTHER')
  -- which only compares against 'INTERNAL'; the bare strings 'EXTERNAL' and
  -- 'OTHER' are evaluated as booleans, not compared to `network`.
  CONSTRAINT CHK_network CHECK (network IN ('INTERNAL', 'EXTERNAL', 'OTHER')),
  PRIMARY KEY (`id`)
);

INSERT INTO `machines`
VALUES
(1, 'host1', '123.123.123.1', 'EXTERNAL'),
(2, 'host2', '192.168.0.1', 'EXTERNAL' ),
-- The ' ' value below violates CHK_network and is rejected on servers that
-- enforce CHECK constraints (MySQL >= 8.0.16) -- presumably a deliberate demo.
(3, 'host3', '192.168.0.2', ' ' );

-- -----------------------------------------------------------------------------

create database sales;

use sales;

-- Raw sales fact table. order_date/ship_date arrive as text in the CSV and
-- are converted to DATE columns later in this script.
CREATE TABLE sales1 (
    order_id VARCHAR(15) NOT NULL,
    order_date VARCHAR(15) NOT NULL,
    ship_date VARCHAR(15) NOT NULL,
    ship_mode VARCHAR(14) NOT NULL,
    customer_name VARCHAR(22) NOT NULL,
    segment VARCHAR(11) NOT NULL,
    state VARCHAR(36) NOT NULL,
    country VARCHAR(32) NOT NULL,
    market VARCHAR(6) NOT NULL,
    region VARCHAR(14) NOT NULL,
    product_id VARCHAR(16) NOT NULL,
    category VARCHAR(15) NOT NULL,
    sub_category VARCHAR(11) NOT NULL,
    product_name VARCHAR(127) NOT NULL,
    -- NOTE(review): scale 0 truncates any fractional sales values in the
    -- CSV -- confirm the source amounts are whole numbers.
    sales DECIMAL(38, 0) NOT NULL,
    quantity DECIMAL(38, 0) NOT NULL,
    discount DECIMAL(38, 3) NOT NULL,
    profit DECIMAL(38, 8) NOT NULL,
    shipping_cost DECIMAL(38, 2) NOT NULL,
    order_priority VARCHAR(8) NOT NULL,
    `year` DECIMAL(38, 0) NOT NULL
);

-- Clearing sql_mode disables strict checks for this session so the raw CSV
-- loads without rejections; re-enable strict mode for production work.
SET SESSION sql_mode = '';

load data infile
'D:/sales_data_final.csv'
into table sales1
fields terminated by ','
enclosed by '"'
lines terminated by '\n'
ignore 1 rows;

select * from sales1;

-- Preview the text-to-date conversion.
-- Bug fix: use %Y (4-digit year) to match the UPDATE statements below; the
-- original preview used %y, which parses 2-digit years.
select str_to_date(order_date,'%m/%d/%Y') from sales1;

-- Bug fix: safe-update mode blocks UPDATEs without a key in the WHERE
-- clause, so it must be disabled BEFORE the whole-table updates (it
-- originally appeared after them).
SET SQL_SAFE_UPDATES = 0;

alter table sales1
add column order_date_new date after order_date;

-- Whole-table update by design: populate the converted column.
update sales1
set order_date_new = str_to_date(order_date,'%m/%d/%Y');

alter table sales1
add column ship_date_new date after ship_date;

update sales1
set ship_date_new = str_to_date(ship_date, '%m/%d/%Y');

select * from sales1;

-- Date comparisons now work natively on the converted columns.
select * from sales1 where ship_date_new = '2011-01-05';
select * from sales1 where ship_date_new > '2011-01-05';
select * from sales1 where ship_date_new < '2011-01-05';
select * from sales1 where ship_date_new between '2011-01-05' and '2011-08-30';
select now();
select curdate();
select curtime();

-- Date arithmetic with DATE_SUB / INTERVAL.
select * from sales1 where ship_date_new < date_sub(now() , interval 1 week);

select date_sub(now() , interval 1 week);
select date_sub(now() , interval 30 day);
select date_sub(now() , interval 30 year);
select year(now());
select dayname('2022-09-20 21:10:30');

-- Audit column: the DATE type keeps only the calendar date, so the time
-- portion of now() is discarded on assignment.
alter table sales1
add column flag date after order_id;

update sales1
set flag = now();

select * from sales1;

-- NOTE(review): `year` holds numeric values such as 2011; converting the
-- column to datetime fails (or zeroes it) under strict mode -- confirm this
-- step is actually wanted before running it.
ALTER TABLE sales1
modify column year datetime;

-- Bug fix: Year_New / Month_New / Day_New do not exist yet, so the original
-- MODIFY COLUMN statements would error; ADD COLUMN creates them.
alter table sales1
add column Year_New int;

alter table sales1
add column Month_New int;

alter table sales1
add column Day_New int;

-- Derive the date parts from the converted order date.
update sales1 set Month_New = month(order_date_new);
update sales1 set Day_New = day(order_date_new);
update sales1 set Year_New = year(order_date_new);

select * from sales1 limit 5;

select month(order_date_new) from sales1;

-- Yearly aggregates over the derived year column.
select year_new , avg(sales) from sales1 group by year_new;
select year_new , sum(sales) from sales1 group by year_new;
select year_new , min(sales) from sales1 group by year_new;
select year_new , max(sales) from sales1 group by year_new;
select year_new , sum(quantity) from sales1 group by year_new;

-- Derived per-row cost figure.
select (sales*discount+shipping_cost) as CTC from sales1;

-- Preview: flag rows that carry any discount.
select order_id , discount , if(discount > 0 ,'yes' , 'no') as discount_flag
from sales1;

-- Bug fix: discount_flag does not exist yet, so ADD COLUMN (not MODIFY) is
-- required to create it.
alter table sales1
add column discount_flag varchar(20) after discount;

-- Bug fix: populate the column BEFORE the queries that read it; originally
-- the group-by below ran against an all-NULL column.
update sales1
set discount_flag = if(discount > 0, 'yes', 'no');

select * from sales1;

select discount_flag , count(*) from sales1 group by discount_flag;

-- Cross-check: the 'yes' count above should equal this row count.
select count(*) from sales1 where discount > 0;

-- -----------------------------------------------------------------------------

You might also like