SQL Cheat Sheet

This page contains a sample of "general purpose" SQL script templates.


Quick Index

   


create table
-- Create a table with typed columns (Oracle types: NUMBER / VARCHAR2 / DATE),
-- NOT NULL constraints, a SYSDATE default and a named primary-key constraint.
CREATE TABLE <table_name> 
(    
    <column_name_1>     NUMBER              NOT NULL,  
    <column_name_2>     VARCHAR2( 15 )      NOT NULL,   
    <column_name_3>     VARCHAR2( 320 )     NOT NULL,           
    <column_name_4>     VARCHAR2( 20 ),
    -- DEFAULT SYSDATE stamps the insert time when no explicit value is given.
    <column_name_5>     DATE                DEFAULT SYSDATE     NOT NULL,     
    <column_name_6>     DATE,
    -- Naming the constraint keeps errors and migrations greppable.
    CONSTRAINT <primary_key_name>           PRIMARY KEY ( <column_name_1> )                   
)
create table index
-- Non-unique index on a single column.
CREATE INDEX <index_name>
    ON <table_name> ( <column_name> )
create unique table index
-- Unique index: speeds lookups AND enforces uniqueness of the indexed column.
CREATE UNIQUE INDEX <index_name> 
    ON <table_name> ( <column_name> )
create table sequence
-- Basic ascending sequence; CACHE 10 preallocates ten values in memory.
CREATE SEQUENCE <sequence_name> 
    MINVALUE 1
    START WITH 1
    INCREMENT BY 1
    CACHE 10;
-- Schema-qualified sequence that wraps around at MAXVALUE (CYCLE) and caches
-- nothing (NOCACHE: no values lost on instance restart, at a performance cost).
-- NOORDER: no guaranteed generation order across RAC instances.
CREATE SEQUENCE <table_schema>.<sequence_name>
    START WITH 1000
    MAXVALUE 999999999999999999999999999
    MINVALUE 1
    CYCLE
    NOCACHE
    NOORDER;
    
-- Same as above but with a large cache (CACHE 100) for high insert rates;
-- cached-but-unused values are lost if the instance restarts.
CREATE SEQUENCE <table_schema>.<sequence_name>
    START WITH 1
    MAXVALUE 999999999999999999999999999
    MINVALUE 1
    CYCLE
    CACHE 100
    NOORDER;
select values from table
   
-- Exact-match filter on two columns.
SELECT * 
    FROM <table_name> 
    WHERE <column_#1_name> = '<value_#1>' AND <column_#2_name> = '<value_#2>'
 
-- LIKE with a leading wildcard ("ends with"); a leading '%' prevents a
-- regular index on the column from being used.
SELECT <column_#1_name>, <column_#3_name> 
    FROM <table_schema>.<table_name> 
    WHERE <column_#2_name> = '<value_#2>' AND <column_#1_name> LIKE '%<value_#1>'
 
-- NOT LIKE with a trailing wildcard ("does not start with").
SELECT <column_#1_name> 
    FROM <table_name> 
    WHERE <column_#1_name> NOT LIKE '<value_#1>%'
 
-- Arithmetic plus date formatting in the select list; LAST_DAY( SYSDATE )
-- yields the last day of the current month.
SELECT ( floor( <column_#1_name> / 1024 ) || ',' || mod( <column_#1_name>, 1024 ) ) AS <alias_#1_name>,  
        to_char( <column_#2_name>, 'DD/MM' ) AS <alias_#2_name>, 
        to_char( LAST_DAY( SYSDATE ), 'DD/MM' ) AS <alias_#3_name>
    FROM <table_schema>.<table_name> 
    WHERE <column_#4_name> = '<value_#4>' 
 
-- IN / NOT IN value lists. NOTE(review): NOT IN returns no rows if the list
-- (or a subquery used in its place) ever contains a NULL.
SELECT <column_#1_name>, <column_#2_name> 
    FROM <table_schema>.<table_name> 
    WHERE <column_#1_name> = '<value_#1>' 
        AND <column_#2_name> IN ( '<value_#2a>', '<value_#2b>' ) 
        AND <column_#3_name> != '<value_#3>' 
        AND <column_#4_name> NOT IN ( '<value_#4a>', '<value_#4b>' )
 
-- Two-table join with date formatting on the first table's columns.
-- Fix vs original: the implicit comma join is now an explicit ANSI INNER JOIN
-- with its condition in the ON clause, and short aliases replace the
-- repeated schema-qualified table names.
SELECT t1.<column_#1.1_name>, 
        to_char( t1.<column_#1.2_name>, 'dd/mm, HH24:mi' ), 
        to_char( ( t1.<column_#1.3_name> - 1 ), 'dd/mm' )
    FROM <table_schema>.<table_#1_name> t1
    INNER JOIN <table_schema>.<table_#2_name> t2
        ON t1.<column_name> = t2.<column_name>
    WHERE t2.<column_name> = '<value>' 
        AND t2.<column_#2.1_name> IN ( '<value_#2.1a>', '<value_#2.1b>' )
 
-- Assorted TO_CHAR date formats, including a month name localised via the
-- third (NLS parameter) argument.
SELECT <column_#1_name>,
        to_char( <column_#2_name>, 'dd/mm, HH24:mi' ),
        to_char( <column_#2_name>, 'MONTH', 'NLS_DATE_LANGUAGE = GREEK' ),
        to_char( <column_#3_name>, 'dd/mm/yy HH24:mi' ),
        to_char( ( <column_#4_name> - 1 ), 'dd/mm' ) 
    FROM <table_schema>.<table_name> 
    WHERE <column_#1_name> = '<value_#1>'
 
-- Minutes-to-"H:MM" formatting plus assorted TO_CHAR date formats.
-- Fixes vs original: a trailing comma after the last select-list item
-- (syntax error before FROM) and an unused table alias "a" are removed.
SELECT <column_#1_name>,
        -- <column_#2_name> minutes -> "hours:minutes" via integer division / mod
        CEIL( ( <column_#2_name> - mod( <column_#2_name>, 60 ) ) / 60 ) || ':' || ( mod( <column_#2_name>, 60 ) ) AS <alias_#2_name>,
        to_char( <column_#3_name>, 'dd/mm, HH24:mi' ) AS <alias_#3_name>,
        to_char( <column_#4_name>, 'MONTH', 'NLS_DATE_LANGUAGE = GREEK' ) AS <alias_#4_name>,
        to_char( last_day( SYSDATE ), 'dd/MM' ) AS <alias_#5_name>
    FROM <table_schema>.<table_name>
    WHERE <column_#1_name> IN ( '<value_#1>', '<value_#2>' )
 
-- Extract the text between '(' and ')' with substr/instr, over a compound
-- OR/AND filter plus a date window.
SELECT substr( <column_#1_name>, instr( <column_#1_name>, '(') + 1, instr( <column_#1_name>, ')' ) - instr( <column_#1_name>, '(' ) - 1 ) AS <alias_#1_name>
    FROM <table_name>
    WHERE ( ( <column_#1_name> IN ( '<value_#1>', '<value_#2>' ) ) 
            OR ( <column_#1_name> IS NULL 
                AND <column_#2_name> IN ( '<value_#3>', '<value_#4>' ) )
            OR ( <column_#1_name> LIKE '%<value_#5>%' ) 
            OR ( <column_#1_name> LIKE '%<value_#6>%' ) )
        -- Closed date range; the 23:59:59 upper bound misses fractional
        -- seconds on TIMESTAMP columns - prefer a half-open "< next day".
        AND ( <column_#3_name> >= to_date( '<value_#5>', 'ddmmyyyy' ) 
        AND <column_#3_name> <= to_date( '<value_#6> 23:59:59', 'ddmmyyyy hh24:mi:ss' ) ) 
    GROUP BY <column_#1_name>, <column_#4_name>
    ORDER BY <column_#1_name>, <column_#4_name> ASC
 
-- Daily counts: group by a column and the date part (TRUNC) of a timestamp.
SELECT a.<column_#1_name> AS <alias_#1_name>, 
        to_char( trunc( a.<column_#2_name> ), 'dd-mm-yyyy' ) AS <alias_#2_name>, 
        count(*) AS <alias_3_name>
    FROM <table_name> a
    WHERE a.<column_#4_name> = '<value_#1>' AND a.<column_#5_name> = '<value_#2>'
    GROUP BY a.<column_#1_name>, trunc( a.<column_#2_name> )  
    ORDER BY trunc( a.<column_#2_name> ) DESC 
 
-- Case-insensitive pattern match via UPPER(); the function call on the
-- column defeats a plain index (a function-based index would be needed).
SELECT <column_#1_name> 
    FROM <table_schema>.<table_name> 
    WHERE ( ( UPPER( <column_#1_name> ) LIKE '%<value_#1.1>%<value_#1.2>%') OR ( UPPER( <column_#1_name> ) LIKE '%<value_#1.3>%<value_#1.4>%' ) ) 
            AND <column_#2_name> = '<value_#2>'
 
-- Counts over a sliding window: the last 7 whole days, today excluded.
SELECT a.<column_#1_name>, count( * )
    FROM <table_name> a
    WHERE a.<column_#2_name> >= trunc( SYSDATE ) - 7 
        AND a.<column_#2_name> < trunc( SYSDATE ) 
        AND a.<column_#3_name> = '<value_#1>' 
        AND a.<column_#4_name> = '<value_#2>'
    GROUP BY a.<column_#1_name>
 
-- DISTINCT applies to the whole select list; the parentheses here are
-- ordinary grouping, not a function call.
SELECT distinct( a.<column_#1_name> )
    FROM <table_name> a
    WHERE a.<column_#2_name> LIKE '<value_#2>%' AND a.<column_#3_name> = '<value_#3>'
 
SELECT DISTINCT( <column_#1_name> ), <column_#2_name> 
    FROM <table_schema>.<table_name> 
    WHERE <column_#3_name> = to_char( SYSDATE, 'DD' ) 
        AND <column_#4_name> >= TRUNC( SYSDATE ) - 1
        AND <column_#2_name> IN ( '<value_#1>', '<value_#2>' )     
 
-- Two-table join returning a column from the second table.
-- Fixes vs original: mixed-case "Where" keyword, a misplaced quote in the
-- IN list ( '<value_#2>, '<value_#3>' ), and the implicit comma join is now
-- an explicit ANSI INNER JOIN with its condition in the ON clause.
SELECT <table_#2_alias>.<column_#1_name> 
    FROM <table_schema>.<table_#1_name> <table_#1_alias>
    INNER JOIN <table_schema>.<table_#2_name> <table_#2_alias>
        ON <table_#1_alias>.<column_#4_name> = <table_#2_alias>.<column_#5_name>
    WHERE <table_#1_alias>.<column_#2_name> = '<value_#1>' 
        AND <table_#1_alias>.<column_#3_name> IN ( '<value_#2>', '<value_#3>' ) 
        AND <table_#1_alias>.<column_#6_name> = '<value_#4>'
 
-- UNION ALL of two tables inside an inline view so a single ORDER BY sorts
-- the combined result. UNION ALL keeps duplicates (no sort/de-dup step).
SELECT * 
    FROM (
        SELECT <column_#1_name>, <column_#2_name> 
            FROM <table_#1_name> 
            WHERE <column_#3_name> = '<value_#1>'
        UNION all
        SELECT <column_#1_name>, <column_#2_name> 
            FROM <table_#2_name> )
    ORDER BY <column_#1_name> ASC
 
-- Counts grouped rows split by whether <column_#3_name> is NULL, tagging each
-- half with a literal flag column ('' vs 'YES') via the constant <alias_#3_name>.
-- Fix vs original: the two branches can never produce identical rows (the
-- <alias_#3_name> literal differs), so UNION ALL is equivalent to UNION here
-- and skips UNION's implicit sort/de-duplication pass.
SELECT <column_#1_name>, 
        decode( <column_#2_name>, '0', '', '1', 'YES' ) AS <alias_#2_name>, 
        '' AS <alias_#3_name>, 
        count( * ) AS <alias_4_name> 
    FROM <table_name>
    WHERE <column_#3_name> IS NULL
    GROUP BY <column_#1_name>, <column_#2_name> 
UNION ALL
SELECT <column_#1_name>, 
        decode( <column_#2_name>, '0', '', '1', 'YES' ) AS <alias_#2_name>, 
        'YES' AS <alias_#3_name>, 
        count( * ) AS <alias_4_name> 
    FROM <table_name>
    WHERE <column_#3_name> IS NOT NULL
    GROUP BY <column_#1_name>, <column_#2_name>
update table values
 
-- Update one column, keyed by its old value plus two further filters.
UPDATE <table_name>
    SET <column_#1_name> = '<value_#1_new>'
    WHERE <column_#1_name> = '<value_#1_old>' 
        AND <column_#2_name> = '<value_#2>' 
        AND <column_#3_name> = '<value_#3>' 
 
-- Clear a column wherever it currently holds a value.
UPDATE <table_name> 
    SET <column_#1_name> = NULL 
    WHERE <column_#1_name> IS NOT NULL
 
-- Update several columns at once; SYSDATE stamps the modification time.
UPDATE <table_name> 
    SET <column_#1_name> = '<value_#1>', <column_#2_name> = SYSDATE 
    WHERE <column_#3_name> = '<value_#3>'
insert table values
 
-- Insert with an explicit column list, a sequence-generated key and an
-- exact TO_DATE timestamp. Prefer this form: it survives schema changes.
INSERT INTO <table_schema>.<table_name> ( <column_#1_name>, <column_#2_name>, <column_#3_name>  ) 
    VALUES ( <sequence_name>.nextval, '<value_#2>', to_date( '19052013 00:00:00', 'DDMMYYYY HH24:MI:SS' ) )
 
-- Positional inserts (no column list): the values must match the table's
-- column order exactly and break silently when columns are added.
INSERT INTO <table_name> 
    VALUES ( '<value_#1>', '<value_#2>' )
 
INSERT INTO <table_name> 
    VALUES ( '<value_#1>', 'dummy', ' ', TRUNC( SYSDATE ), NULL, '<value_#2>', ' ' )
 
INSERT INTO <table_name> 
    VALUES ( TRUNC( SYSDATE ), <sequence_name>.NEXTVAL, '<value_#1>', NULL, '1', NULL, SYSDATE, NULL, 538, SYSDATE )
add table column
 
-- Append a new column to an existing table.
ALTER TABLE <table_name>
    ADD <column_name> varchar2( 60 )
 
-- New DATE column with an automatic SYSDATE default.
ALTER TABLE <table_name>
    ADD <column_name> DATE  DEFAULT SYSDATE
alter column size
 
-- Enlarge a column; shrinking requires the existing data to already fit.
ALTER TABLE <table_name> 
    MODIFY <column_name> VARCHAR2( 320 ) 
alter table sequence cache size
 
ALTER SEQUENCE <sequence_name> cache 20
delete table values
 
-- Deletes every row where the column holds a value - double-check the
-- WHERE clause before running any bulk DELETE.
DELETE FROM <table_name> 
    WHERE <column_name> IS NOT NULL
 
-- Matches rows whose second column is NULL or pure whitespace. NOTE(review):
-- in Oracle trim('') yields NULL, so the length() = 0 branch is redundant there.
DELETE FROM <table_name> 
    WHERE <column_#1_name> = '<value_#1>' 
        AND ( trim( <column_#2_name> ) IS NULL OR length( trim( <column_#2_name> ) ) = 0 )
drop table
 
DROP TABLE <table_name>
drop table column
 
ALTER TABLE <table_name>
    DROP COLUMN <column_name>
drop index
 
DROP INDEX <index_name>
drop sequence
 
DROP SEQUENCE <table_schema>.<sequence_name>
truncate table
 
-- TRUNCATE is DDL: faster than an unfiltered DELETE but cannot be rolled back.
TRUNCATE TABLE <table_name>
show db jobs
 
-- Jobs scheduled via DBMS_JOB for the current user.
SELECT * FROM user_jobs
pl/sql block execution Execute the following anonymous PL/SQL block (a stored-procedure call) via the "sqlplus" utility:
 
-- Anonymous PL/SQL block calling a stored procedure and printing its OUT
-- parameters. SERVEROUTPUT must be ON for DBMS_OUTPUT to reach the screen.
SET serveroutput ON
DECLARE 
    <input_parameter_#1_name> varchar2(30) := '<value_#1>';
    <input_parameter_#2_name> varchar2(30) := '<value_#2>';
    <output_parameter_#3_name> number;
    <output_parameter_#4_name> varchar2(30);            
BEGIN
    <table_schema>.<stored_procedure_name>(<input_parameter_#1_name>, <input_parameter_#2_name>,
                                           <output_parameter_#3_name>, <output_parameter_#4_name> );
    DBMS_OUTPUT.PUT_LINE( 'output: ' || <output_parameter_#3_name> );
    DBMS_OUTPUT.PUT_LINE( 'output: ' || <output_parameter_#4_name> );
END;
/
-- Fix vs original: the trailing "/" is required - without it sqlplus keeps
-- buffering input and never executes the block.
show long-running queries
 
-- In-flight long operations with a completion percentage.
-- Fix vs original: NULLIF guards the division - a v$session_longops row with
-- totalwork = 0 raised ORA-01476 (divisor is equal to zero); with NULLIF the
-- percentage becomes NULL and the != 100 predicate simply filters the row out.
SELECT * FROM 
    ( SELECT username, opname, sid, serial#, context, sofar, totalwork,
             round( sofar / NULLIF( totalwork, 0 ) * 100, 2 ) "% Complete" 
        FROM v$session_longops ) 
    WHERE "% Complete" != 100
sid of established db session
 
-- SID of the current session (V$MYSTAT has one row per statistic, hence DISTINCT).
SELECT distinct( SID ) 
    FROM V$MyStat
show db sequences
 
SELECT * 
    FROM user_sequences
 
-- Approximate current value of a sequence. NOTE(review): for a cached
-- sequence LAST_NUMBER reflects the cache high-water mark, so this is only
-- exact for NOCACHE sequences - verify before relying on it.
SELECT last_number - increment_by currval 
    FROM user_sequences 
    WHERE SEQUENCE_NAME = '<sequence_name>'
tnsnames.ora Network configuration file @ "c:\Oracle\Ora9i\NETWORK\ADMIN\":
 
# Alias resolved via SERVICE_NAME (preferred modern form).
<table_#1_schema>.<domain> =
    ( DESCRIPTION =
        ( ADDRESS_LIST =
            ( ADDRESS = ( PROTOCOL = TCP )( HOST = <IP> )( PORT = <port> ) )
        )
        ( CONNECT_DATA =
            ( SERVICE_NAME = <service_name> )
        )
    )
# Alias resolved via the older SID form.
<table_#2_schema>.<domain> =
    ( DESCRIPTION =
        ( ADDRESS_LIST =
            ( ADDRESS = ( PROTOCOL = TCP )( HOST = <IP> )( PORT = <port> ) )
        )
        ( CONNECT_DATA =
            ( SID = <sid> )
        )
    )   
connection-string configuration Connection without "tnsnames.ora":
 
sqlplus <username>/<password>@"(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=<hostname>)(PORT=<port>))(CONNECT_DATA=(SERVICE_NAME=<service_name>)))"
load data onto an Oracle Database Making use of "sqlldr" (SQL loader) utility: • Assuming that you have saved the data to be uploaded in a file with file name "import.dat" as follows (comma delimited):
    <column_#1.1_value>,<column_#2.1_value>,<column_#3.1_value>
    <column_#1.2_value>,<column_#2.2_value>,<column_#3.2_value>
    <column_#1.3_value>,<column_#2.3_value>,<column_#3.3_value>
    <column_#1.4_value>,<column_#2.4_value>,<column_#3.4_value>
• Prepare a control file "import.ctl" like the following one:
    LOAD DATA
    INFILE 'import.dat'
    APPEND
    INTO TABLE "<table_name>"
    FIELDS TERMINATED BY ','
    (
        "<column_#1_name>" CHAR,  
        "<column_#2_name>" CHAR,
        "<column_#3_name>" DATE "DD/MM/YYYY"
    )
where + INTO TABLE "<table_name>" defines the target table.
+ FIELDS TERMINATED BY ',' the comma delimited file format.
+ and the following block defines the table column names and format.
+ In practice the rows of the imported data file will be APPENDED at the end of the mentioned db table.
• Login to the proper server as <user> user and ftp the above import.dat and import.ctl files and save them under the same directory. • execute the following command:
    sqlldr <username>/<password>@<table_schema>.<domain> control=import.ctl log=import.log data=import.dat rows=5000
generally:
    sqlldr <username>/<password>@<table_schema>.<domain> control=<control filename> log=<export log filename> data=<import data filename> rows=<rows before commit>
The script commits automatically and, in case something goes wrong, it creates files containing the errors that occurred.
In any case, it creates a log file containing information about the status of the execution, the number of rows inserted, etc.

load data onto an Oracle Database | script for cdr upload "GitHub" repository of this script can be found at : https://github.com/aristotelis-metsinis/cdr-import (source code along with line by line comments). Making use of "sqlldr" (SQL loader) utility: • Assuming that you have adjusted the following "cdr_import.sh" shell script:
#!/bin/bash
#
# CDRs import script.
#
# The script imports CDRs produced by a service application. 
# * It may run for example once per day through "cron".
# * Gets all "CDR" files produced on the day before (if any) and 
# * converts them into "iso8859-7" as they (may) contain Greek text in "UTF8". 
# * The converted CDRs are being appended into a single (daily) file. 
# * It also moves each original "CDR" file under the proper "archive" directory.
# * Then it executes "SQL loader" Oracle utility, uploading the data onto the proper db table. 
# * In case where an "Oracle" exception has been thrown, it catches this error, submits the proper 
#   email notification and exits. 
# * Else, it retrieves the number of "successfully loaded" and the number of "total read" records, 
#   verifying whether the two numbers match or not submitting the proper email notification per case.
#
# Review fixes: variable expansions are now quoted everywhere (previously an
# empty $cdrLoaded / $cdrRead would make the "[" test a syntax error), "ls"
# stderr is suppressed when no CDR files exist, the stray backslash-escapes
# inside the quoted sqlldr connect string are removed (inside double quotes
# they were passed literally to sqlldr), and indentation is consistent (tabs).

# Set the email of service "admin". Any notification will be sent to this email.
adminEmail=someone@somewhere.com

# Set the necessary db connection configurations.
dbport=<port>
dbsid=<id>
dbpwd=<pwd>
dbuser=<user>
dbname=<name>

# Calculate the date of the day before (YYYYMMDD).
yesterday=$(perl -e '@y=localtime(time()-86400); printf "%04d%02d%02d",$y[5]+1900,$y[4]+1,$y[3];')

# Set the name "template" of the "CDR" files we are looking for; for example: 
filename=cdr_$yesterday

# Count the files waiting for processing under the "cdr" directory.
# stderr is discarded so an unmatched glob does not print an "ls" error.
files=$(ls cdr/"$filename"*.cdr 2>/dev/null | wc -l | sed 's/^[ ]*//g')

# If no files found then submit the proper email notification and terminate.
if [ "$files" -eq 0 ]
then
	subject="No CDRs for $yesterday..."
	message="No CDR files found..."
	(echo "$message") | mailx -s "$subject" "$adminEmail"

	exit 0
fi

# Get "CDR" files, convert their encoding, appending their data into a single file under the proper path 
# and finally move them into the proper "archive" dir.
for file in cdr/"$filename"*.cdr
do
	iconv -c -f utf8 -t iso8859-7 "$file" >> cdr/closed/"$filename".cdr
	mv "$file" cdr/archived/
done

# Execute "SQL loader" utility, uploading the "encoded" data file onto the proper db table.
sqlldr "$dbuser/$dbpwd@(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=$dbname)(PORT=$dbport))(CONNECT_DATA=(SERVICE_NAME=$dbsid)))" control=cdr_import.ctl log=cdr/log/"$filename".log bad=cdr/log/"$filename".bad data=cdr/closed/"$filename".cdr rows=5000

# Check if an "Oracle" exception has been thrown. Submit the proper notification email and terminate.
# NOTE(review): the script deliberately exits 0 even on error - the email is
# the failure signal; switch to a non-zero exit if a caller must detect it.
if grep -q -i "ORA-" cdr/log/"$filename".log
then
	subject="CAUTION: error uploading CDRs for $yesterday..."
	message=$(grep -i "ORA-" cdr/log/"$filename".log)
	(echo "$message") | mailx -s "$subject" "$adminEmail"

	exit 0
fi

# Get number of "successfully loaded" and "total read CDR" rows from the loader log.
cdrLoaded=$(grep "successfully loaded" cdr/log/"$filename".log | sed 's/^[ ]*//g' | cut -f 1 -d " ")
cdrRead=$(grep "Total logical records read" cdr/log/"$filename".log | cut -d " " -f 5- | cut -f 5- | sed 's/^[ ]*//g')

# Check if both numbers are equal and submit the proper notification email.
if [ "$cdrLoaded" != "$cdrRead" ]
then
	subject="CAUTION: error uploading CDRs for $yesterday..."
	message="Total logical records read : $cdrRead, while successfully loaded : $cdrLoaded"
	(echo "$message") | mailx -s "$subject" "$adminEmail"
else
	subject="Success: CDRs uploaded for $yesterday..."
	message="Total logical records read and successfully loaded : $cdrLoaded"
	(echo "$message") | mailx -s "$subject" "$adminEmail"
fi

exit 0 
• and that you have also adjusted the following control file "cdr_import.ctl":
--
-- "SQL Loader" control file.
--
-- Contains the configuration necessary for "SQL Loader" utility. 
-- Uploads CDRs data on the proper db table for a service application.  
--
LOAD DATA
-- Rows are appended to the existing table contents (vs REPLACE/TRUNCATE).
APPEND
INTO TABLE "<table_name>"
-- Input records are pipe-delimited.
FIELDS TERMINATED BY '|'
-- Missing trailing fields load as NULL instead of raising an error.
TRAILING NULLCOLS
-- The ":" lines below are template ellipsis - list every column in order.
(
  "<column-1>" 	DATE "DDMMYYYY HH24:MI:SS",
  "<column-2>" 	CHAR,  
  "<column-3>" 	CHAR,
  "<column-4>" 	CHAR,
	:
	:
  "<column-n>" 	CHAR
)