Create config/setup_load_paths.rb and add the following:
if ENV['MY_RUBY_HOME'] && ENV['MY_RUBY_HOME'].include?('rvm')
  begin
    #puts ENV['MY_RUBY_HOME']
    rvm_path     = File.dirname(File.dirname(ENV['MY_RUBY_HOME']))
    rvm_lib_path = File.join(rvm_path, 'lib')
    $LOAD_PATH.unshift rvm_lib_path
    require 'rvm'
    RVM.use_from_path! File.dirname(File.dirname(__FILE__))
  rescue LoadError
    # RVM is unavailable at this point.
    raise "RVM ruby lib is currently unavailable."
  end
end
Specify the RVM Ruby in the application's Apache virtual host, e.g.:
LoadModule passenger_module /home/deploy/.rvm/gems/ruby-1.9.2-p0/gems/passenger-3.0.2/ext/apache2/mod_passenger.so
PassengerRoot /home/deploy/.rvm/gems/ruby-1.9.2-p0/gems/passenger-3.0.2
PassengerRuby /home/deploy/.rvm/environments/ruby-1.9.2-p0
All done. Start Apache with sudo from the user whose RVM environment is configured above.
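If you are unsure which paths to plug into the directives above, the snippet below is a rough way to print them from the deploy user's shell. It assumes Passenger was installed as a gem under that user's RVM Ruby and that gem and rvm are on the PATH; this is a sketch, not taken from the post.
# print candidate values for PassengerRoot and PassengerRuby under RVM
passenger_lib=$(gem which phusion_passenger)
echo "PassengerRoot: $(dirname "$(dirname "$passenger_lib")")"
echo "PassengerRuby: $HOME/.rvm/environments/$(rvm current)"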
Wednesday, December 29, 2010
Friday, December 24, 2010
ClamAV Gateway Antivirus: c-icap Settings For squid.conf
icap_enable on
icap_preview_enable on
icap_preview_size 128
icap_send_client_ip on
icap_service service_avi_req reqmod_precache 0 icap://localhost:1344/srv_clamav
icap_service service_avi respmod_precache 1 icap://localhost:1344/srv_clamav
icap_class class_antivirus service_avi service_avi_req
icap_access class_antivirus allow all
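Before pointing Squid at it, it is worth confirming the ICAP service actually answers; c-icap ships a small test client, and an EICAR download through the proxy exercises the whole chain. The file name, proxy address, and EICAR URL below are examples, not from the post.
c-icap-client -i localhost -p 1344 -s srv_clamav -f /tmp/testfile.txt
wget -e use_proxy=yes -e http_proxy=127.0.0.1:3128 http://www.eicar.org/download/eicar.com -O /dev/null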
Nmap To Discover Network IPs, Open Ports, And OS Type In XML Format
#! /bin/bash
# Scan every 192.* host currently in the ARP table and write one XML report per host.
ETHERNET=eth0
HostList=(`arp -vni $ETHERNET | cut -d " " -f 1 | grep 192 | xargs`)
for (( i = 0 ; i < ${#HostList[*]} ; i++ ))
do
    nmap -F -O --osscan-limit -oX ${HostList[i]}_$i.xml ${HostList[i]}
done
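A rough way to pull the discovered addresses and OS guesses back out of the generated XML reports; this assumes xmllint from libxml2 is installed.
for report in *_*.xml
do
    echo "== $report"
    xmllint --xpath '//address/@addr | //osmatch/@name' "$report" 2>/dev/null
    echo ""
done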
Bandwidthd CDF File Processing
#! /bin/bash
# Sum columns 2-16 of bandwidthd's log.*.cdf files per client IP and print the totals.
user_array=`cat log.*.cdf | cut -d , -f 1 | sort -u | xargs`
for B in $user_array
do
    c=2
    echo -e "For the IP $B"
    echo "" > /tmp/port$$$
    while [ $c -le 16 ]
    do
        # collect column $c for this IP, then add the values up
        cat log.*.cdf | grep $B | cut -d , -f $c | xargs > /tmp/ele$$$
        I=0
        for N in `cat /tmp/ele$$$`
        do
            I=`expr $I + $N`
        done
        echo $I >> /tmp/port$$$
        (( c++ ))
    done
    sleep 2
    echo -e "\t `cat /tmp/port$$$ | xargs`"
done
rm -rf *.cdf
/etc/init.d/bandwidthd restart
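For reference, the same per-IP totals can be computed in a single pass with GNU awk 4 or newer (arrays of arrays); this is only a sketch, not a drop-in replacement for the script above.
awk -F, '
    { for (c = 2; c <= 16; c++) sum[$1][c] += $c }
    END {
        for (ip in sum) {
            printf "For the IP %s\n\t", ip
            for (c = 2; c <= 16; c++) printf "%s ", sum[ip][c]
            print ""
        }
    }' log.*.cdf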
How To Keep Track Of Disk Usage On Linux
#! /bin/bash
# chetan.muneshwar@gmail.com
# script to get disk space status on multiple instances and alert Admin
# Each element is "used-avail-use%-mountpoint" for one filesystem from df.
bulk_data=(`df -Ph | grep '/' | awk '{ printf "%s-%s-%s-%s ",$3,$4,$5,$6 }'`)
for (( i = 0 ; i < ${#bulk_data[*]}; i++ ))
do
    # third dash-separated field is the use percentage, stripped of the % sign
    threshold="`echo ${bulk_data[i]} | cut -d "-" -f 3 | cut -d "%" -f 1` "
    if [ $threshold -le 0 ] ; then
        shm_val="`echo ${bulk_data[i]} | grep 'shm'`"
        if [ "$shm_val" == "" ] ; then
            echo "SERVER ${bulk_data[i]} " | mail -s " Disk vol fuLL ISHY-SERVER " chetan.muneshwar@labs.net -c shrikant.lokhande@labs.net
        else
            echo ""
        fi
    else
        if [ $threshold -le 90 ] ;then
            echo ""
        else
            echo "SERVER ${bulk_data[i]} " | mail -s " Disk vol warning ISHY-SERVER " chetan.muneshwar@labs.net -c shrikant.lokhande@labs.net
        fi
    fi
done
Finally, save the script as /usr/bin/DISK and add a cron job: * * * * * /usr/bin/DISK
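Before relying on the cron job, a quick sanity check of the df parsing; this prints one used-avail-use%-mount token per filesystem, the same shape the script expects.
df -Ph | awk 'NR > 1 { printf "%s-%s-%s-%s\n", $3, $4, $5, $6 }'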
Monday, November 1, 2010
Anaconda Installer With Custom Brand/Product name
Mount stage2.img from the ISO.
Edit mount_root/usr/lib/anaconda/product.py: comment out the lines as shown in the listing below, set your product strings, save, and rebuild stage2.img.
That's it, in short.
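On CentOS 5-era media stage2.img is a squashfs image, so the edit usually means unpacking, editing, and repacking; a rough sketch of that cycle follows (paths are examples, adjust to your ISO layout).
mkdir -p /mnt/stage2 stage2_root
mount -o loop images/stage2.img /mnt/stage2
cp -a /mnt/stage2/. stage2_root/
umount /mnt/stage2
vi stage2_root/usr/lib/anaconda/product.py        # apply the edits from the listing below
mksquashfs stage2_root images/stage2.img -noappend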
[root@localhost anaconda]# cat product.py
#
# product.py: product identification string
#
import os
#if os.access("/tmp/product/.buildstamp", os.R_OK):
# path = "/tmp/product/.buildstamp"
#elif os.access("/.buildstamp", os.R_OK):
# path = "/.buildstamp"
#elif os.environ.has_key("PRODBUILDPATH") and \
# os.access(os.environ["PRODBUILDPATH"], os.R_OK):
# path = os.environ["PRODBUILDPATH"]
#else:
# path = None
productStamp = ""
productName = "SimpleWall"
productVersion = "bluesky"
productPath = "CentOS"
bugUrl = "Cnetos bugy."
#if path is not None:
# f = open(path, "r")
# lines = f.readlines()
# if len(lines) >= 3:
# productStamp = lines[0][:-1]
# productName = lines[1][:-1]
# productVersion = lines[2][:-1]
# if len(lines) >= 4:
# productPath = lines[3][:-1]
# if len(lines) >= 5:
# bugUrl = lines[4][:-1]
#if os.environ.has_key("ANACONDA_PRODUCTNAME"):
# productName = os.environ["ANACONDA_PRODUCTNAME"]
#if os.environ.has_key("ANACONDA_PRODUCTVERSION"):
# productVersion = os.environ["ANACONDA_PRODUCTVERSION"]
#if os.environ.has_key("ANACONDA_PRODUCTPATH"):
# productPath = os.environ["ANACONDA_PRODUCTPATH"]
#if os.environ.has_key("ANACONDA_BUGURL"):
# bugUrl = os.environ["ANACONDA_BUGURL"]
How To Connect To MySQL With An SSH Tunnel
Exposing MySQL for remote connections is always a security concern; an SSH tunnel is the safer option.
#! /bin/bash
# Copy this script to /usr/bin/
# If nothing is listening on 127.0.0.1:3307, start the tunnel to the remote MySQL (3306).
Tunnel_check="`sudo netstat -tulnp | grep 127.0.0.1:3307`"
if [ "$Tunnel_check" == "" ] ;then
    echo "Tunnel is not running, starting it"
    /usr/bin/ssh -2 -f -C -N deploy@ec2-23.45.124.compute-1.amazonaws.com -L 3307:127.0.0.1:3306
else
    echo "Tunnel is running"
fi
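Once the tunnel is up, point the MySQL client at the local end of it (the user name below is a placeholder):
mysql -h 127.0.0.1 -P 3307 -u dbuser -p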
Wednesday, October 20, 2010
Schedule Scripts With Interval of One Second
Cron's smallest scheduling interval is one minute.
Below is a trick to operate at second granularity.
#! /bin/bash
# Wrapper: start /usr/bin/xyz.rb only if it is not already running.
PROCNAME='/usr/bin/xyz.rb'
PIDS=`ps -efa | grep $PROCNAME | grep -v grep | awk '{ print $2 }'`
for ff in $PIDS
do
    echo "$ff" > /tmp/w
done
if [ -f "/tmp/w" ] ; then
    pid=$(cat /tmp/w)
    if [ "$pid" == "" ] ;then
        /usr/local/bin/ruby /usr/bin/xyz.rb
    else
        echo "Already running $pid"
    fi
    rm -rf /tmp/w
else
    /usr/local/bin/ruby /usr/bin/xyz.rb &
fi
Here xyz.rb is a simple Ruby program that runs in a loop:
require 'rubygems'
require 'daemons'
loop do
  system('/usr/bin/Server_get | xargs > /var/www/html/text.txt')
  system('/usr/bin/get_app_stats | xargs > /var/www/html/app_stat.txt')
  sleep(25)
end
Finally, add the wrapper to crontab (saved here as /usr/bin/fast_cron):
* * * * * /usr/bin/fast_cron
Cheers, this is very useful for fast messaging and emailing campaigns.
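For jobs that really must fire every few seconds without keeping a daemon resident, another common sketch is a wrapper that loops inside a single cron minute, here every 5 seconds. Both /usr/bin/myjob and the wrapper path in the crontab line are placeholders.
#! /bin/bash
for i in `seq 0 11`
do
    /usr/bin/myjob &
    sleep 5
done
# crontab: * * * * * /usr/bin/fast_loop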
Thursday, October 7, 2010
Caching And Forwarding Name Server On Linux
acl trusted {
    192.168.0.0/24;
    localhost;
};
options {
    directory "/var/named";
    dump-file "/var/named/data/cache_dump.db";
    statistics-file "/var/named/data/named_stats.txt";
    memstatistics-file "/var/named/data/named_mem_stats.txt";
    forwarders { 8.8.8.8; 8.8.4.4; };
    listen-on port 53 { any; };
    allow-query { trusted; };
    allow-query-cache { trusted; };
};
logging {
    channel default_debug {
        file "data/named.run";
        severity dynamic;
    };
};
view localhost_resolver {
    match-clients { trusted; };
    match-destinations { trusted; };
    recursion yes;
    include "/etc/named.rfc1912.zones";
};
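Assuming the snippet above lives in /etc/named.conf, check the syntax and then test a recursive lookup from a trusted client (the server IP below is an example):
named-checkconf /etc/named.conf
service named restart
dig @192.168.0.1 www.example.com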
Git Manual clone and push
For GitHub:
git clone --bare git@github.com:myaccount/my-old-repo.git
cd my-old-repo.git
git push --mirror git@github.com:mycompany/our-new-repo.git
cd ..
rm -rf my-old-repo.git
The same applies to a private Git server managed with gitosis; see the sketch below.
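A hedged example of the same flow against a gitosis-managed remote (host and repository names are placeholders):
git clone --bare git@git.example.com:my-old-repo.git
cd my-old-repo.git
git push --mirror git@git.example.com:our-new-repo.git
cd ..
rm -rf my-old-repo.git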
Monday, October 4, 2010
Use Memcache As Session Store In Rails
Install memcache :
yum install memcached
chkconfig memcached on
/etc/init.d/memcached restart
Install gem :
gem install memcache-client
Add in environment.rb :
require 'memcache'
CACHE = MemCache.new(:namespace => "myapp")
CACHE.servers = '127.0.0.1:11211'
config.action_controller.session_store = :mem_cache_store
config.action_controller.session = {
:session_key => '_my_session',
:secret => '9abfc85851505e1a08sdfgsdfgsfdsfdgdsfgbb389be2acd0356ae0d4ea383f59cde7140ec7b04df473c31e1d4a4b9b78d55175d0c37bb29852c025d491c5cda9194ae',
:cache => CACHE,
:expires=>900 }
Note:
secret => run rake secret and paste the generated key.
port => memcached's default port is 11211.
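To confirm sessions are actually landing in memcached, watch the item counters while hitting the app; memcached-tool ships with the memcached package.
memcached-tool 127.0.0.1:11211 stats | grep -E 'curr_items|total_items'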
Clear Session From Database In Ruby On Rails
Create: app/models/session.rb
Session = CGI::Session::ActiveRecordStore::Session
Session.class_eval do
  def self.sweep!
    delete_all ['updated_at < ?', 15.minutes.ago.utc]
  end
end
Add a cron job so the sweeper runs on schedule:
1 4 * * * /usr/local/bin/ruby /app/apps/current/script/runner -e production Session.sweep!
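To see how many stale rows the sweeper would remove, a quick check against the sessions table; this assumes MySQL and a database named myapp_production, adjust to your setup.
mysql -e "SELECT COUNT(*) FROM sessions WHERE updated_at < NOW() - INTERVAL 15 MINUTE;" myapp_production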
Monday, September 13, 2010
How To Deploy Application To Remote Server Using Chef-Solo From Local Machine
Hello Guys
Since chef-solo never uses a Chef server to store cookbooks, it needs all resources on the machine being deployed. Below is simple Ruby code that copies those resources to the remote server and runs chef-solo there:
chef_deploy.rb :
#!/usr/bin/env ruby
require 'rubygems'
require 'net/ssh'
require 'net/scp'
require 'net/sftp'
require 'yaml'

$CONF_FILE_NAME = ARGV[0]

class AutoDeploy
  attr_accessor :ip, :pass, :uname, :filename, :remote_webapps_path, :run_program, :mod_name

  def initialize(config_name)
    @config_name = config_name
  end

  # Load connection and deployment settings from the YAML config file.
  def read_config
    config = YAML.load_file("#{@config_name}")
    @uname               = config["config"]["uname"]
    @ip                  = config["config"]["ip"]
    @pass                = config["config"]["pass"]
    @filename            = config["config"]["filename"]
    @mod_name            = config["config"]["mod_name"]
    @run_program         = config["config"]["run_program"]
    @remote_webapps_path = config["config"]["remote_webapps_path"]
  end

  # Run the chef-solo command on the remote host and print its output.
  def run_ruby
    Net::SSH.start("#{@ip}", "#{@uname}", :password => "#{@pass}") do |ssh|
      puts ssh.exec!("#{@run_program}")
    end
  end

  # Copy the chef-solo tarball to the remote host.
  def do_scp_tasks
    Net::SCP.start("#{@ip}", "#{@uname}", :password => "#{@pass}") do |scp|
      scp.upload!("#{@filename}", "#{@remote_webapps_path}", :recursive => true)
    end
  end
end

deploy = AutoDeploy.new($CONF_FILE_NAME)
deploy.read_config
@git_var = deploy.mod_name
puts deploy.mod_name
deploy.do_scp_tasks
puts "[INFO] chef_resources copy done successfully"
deploy.run_ruby
puts "[INFO] chef done successfully"
app_details.yml :
config:
  mod_name: _deploy
  uname: root
  ip: 113.213.216.148
  pass: aap-stagiwwwneegV1ce
  filename: programs/chef-101.tar.gz
  remote_webapps_path: /opt
  run_program: "cd /opt/ && tar -zxvf chef-101.tar.gz && cd /opt/chef-101/ && chef-solo -l debug -c config/solo.rb -j config/dna.json"
1. Create a directory, mkdir /opt/chef_me/, and save the above two files there:
ls /opt/chef_me/
chef_deploy.rb app_details.yml programs/
2. The programs folder contains the compressed tar of your chef-solo recipes.
3. Run the program: ruby chef_deploy.rb app_details.yml
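The run_program above expects config/solo.rb and config/dna.json inside the tarball; a minimal sketch of what they might contain, written here with heredocs. The cookbook paths are inferred from the command line above, and the recipe name is a placeholder.
cat > config/solo.rb <<'EOF'
file_cache_path "/opt/chef-101/cache"
cookbook_path   "/opt/chef-101/cookbooks"
EOF
cat > config/dna.json <<'EOF'
{ "run_list": [ "recipe[myapp]" ] }
EOF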
Friday, September 3, 2010
How To Lock A Cron Job From Overrunning In Linux
Just download http://www.unixwiz.net/tools/lockrun.c
# compile it
$ gcc lockrun.c -o lockrun
$ sudo cp lockrun /usr/bin/
# add it as a cron job, e.g. on a single line
*/5 * * * * /usr/bin/lockrun --lockfile=/var/run/mcron.lockrun -- /usr/bin/mcron.sh
# the -- before the wrapped command is required
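A quick way to confirm the lock really serializes runs; the second invocation should bail out while the first still holds the lock (this assumes lockrun's default non-waiting behaviour).
/usr/bin/lockrun --lockfile=/tmp/test.lockrun -- sleep 30 &
/usr/bin/lockrun --lockfile=/tmp/test.lockrun -- echo "this should not print while the sleep holds the lock"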
Extra Password Authentication With Dialog In A Shell Script On Linux
#! /bin/bash
flash_box() {
    dialog --infobox $1 5 50
    sleep .1
}
console_login()
{
    data=/tmp/_dta$$$
    # trap it
    trap "rm -f $data" 0 1 2 5 15
    # get password
    dialog --title "Password" \
        --clear \
        --insecure \
        --passwordbox "Enter your password" 10 30 2> $data
    ret=$?
    # make decision
    case $ret in
        0)
            if [ "$(cat $data)" == "chetanMbetter" ] ;then
                rm -rf /tmp/chetu
                exec bash
            else
                if [ "$(cat $data)" == "" ] ;then
                    flash_box Password_empty
                    sleep 2
                else
                    flash_box Invalid_password
                    sleep 2
                fi
                touch /tmp/chetu
            fi
            ;;
        *)
            main_menu
            ;;
    esac
}
console_login
# save above script as /usr/bin/anyname.sh
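The script calls main_menu when the dialog is cancelled but never defines it; if you use it standalone, a minimal placeholder defined above the final console_login call could look like this (the message box content is just an example):
main_menu()
{
    dialog --msgbox "Cancelled" 5 40
}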
How To Restrict A Linux Console To A Single Program
We can do that with .bashrc. Modify the .bashrc in the user's home folder:
# .bashrc
# User specific aliases and functions
alias rm='rm -i'
alias cp='cp -i'
alias mv='mv -i'
# Source global definitions
if [ -f /etc/bashrc ]; then
    clear
    if [ -f /tmp/chetu ]; then
        . /etc/bashrc
        exec /usr/bin/MY_program.sh
    else
        touch /tmp/chetu
    fi
fi
Finally, run: source ~/.bashrc
All done :)
Thursday, September 2, 2010
How To Use The PHP mail() Function With A Remote SMTP Mail Server
Developers want painless email functionality from PHP's mail() function.
We can still route it through a remote SMTP mail server as below.
# Install ssmtp
yum install ssmtp
# configure ssmtp
/etc/ssmtp/ssmtp.conf
root=noreply@testdomain.com
mailhub=myauthsmtp.com:2525
rewriteDomain=testdomain.com
hostname=myauthsmtp.com:2525
## ssl enabled with smtp then
UseSTARTTLS=YES
AuthUser=username
AuthPass=password
FromLineOverride=YES
# php.ini
replace
sendmail_path = /usr/sbin/sendmail -t
with
sendmail_path = /usr/sbin/ssmtp -t
# Another option stop/remove sendmail and replace it with ssmtp no need to change in php.ini
sudo service sendmail stop
sudo chkconfig --levels 2345 sendmail off
# links as below
sudo mv /usr/sbin/sendmail /usr/sbin/sendmail.orig
sudo ln -s /usr/sbin/ssmtp /usr/sbin/sendmail
Note: system services will still use the default sendmail path, so keep in mind that FROM addresses such as root@yourdomain.com must not be configured, in order to avoid getting the SMTP account suspended.
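A quick end-to-end test once ssmtp is wired in; the recipient address is a placeholder, and mail() returns true when the message was handed off to the configured command.
php -r 'var_dump(mail("someone@example.com", "ssmtp test", "Hello from php mail()"));'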
Tuesday, August 31, 2010
How To Test An SMTP Account With Ruby
To check whether an SMTP account works, use the Ruby script below; save the file with a .rb extension.
require 'net/smtp'

# Build the test message (headers, then a blank line, then the body)
message = <<MESSAGE_END
To: A Test User
Subject: SMTP e-mail test

This is a test e-mail message.
MESSAGE_END

Net::SMTP.start('domain.com', 25, 'domain.com', 'chetu', 'amin') do |smtp|
  smtp.send_message message, 'chetu_242000@gmail.com',
                    'chetan.muneshwar@incom'
end
Friday, August 27, 2010
How to tag Git code
#! /bin/bash
tag_me()
{
    tag_name=$1
    message=$2
    TIME=`date +%Y-%m-%d-%H-%M`
    git tag -d $tag_name
    git tag -a -f -m "$message" $tag_name-$TIME
    git push origin tag $tag_name-$TIME
}
if [ $# -ne 2 ] ;then
    echo 1>&2 Usage: $0 tag_name message
    exit 127
fi
tag_me "$1" "$2"
Usage:
./tag_scripts tag_name message
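Example run, plus a quick check that the annotated tag actually reached the remote (tag name and message are placeholders):
./tag_scripts release "stable build before maintenance"
git ls-remote --tags origin | grep release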
How to branch Git code and push to origin
#!/bin/bash
# git-create-branch
# git
if [ $# -ne 1 ]; then
    echo 1>&2 Usage: $0 branch_name
    exit 127
fi
branch_name=$1
git push origin origin:refs/heads/${branch_name}
git fetch origin
git checkout --track -b ${branch_name} origin/${branch_name}
git pull
============================================================
Usage :
./branching.sh branch_name_desired
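To confirm the branch exists on the remote and that the local branch tracks it (using the branch name from the usage example):
git ls-remote --heads origin branch_name_desired
git branch -vv | grep branch_name_desired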
Recover data from mysqlbinlog
If the server crashes and the restore does not bring back the latest changes, there is a possibility of losing data. We can still recover the important data by replaying the binary logs.
* Estimate the time of the crash and copy all bin-log files to a separate folder. If you are restoring from an older dump, select only the bin-log files dated after that dump was taken.
* Run the command as follows (better to recover one database at a time):
/usr/local/mysql/bin/mysqlbinlog --database=pottytraining mysql-bin.000033 mysql-bin.000034>/opt/pottytraining.sql
* login to mysql prompt
mysql> use pottytraining;
mysql> source /opt/pottytraining.sql;
* Cool see the data.
Thanks
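When you know the crash window, you can also limit the replay by time; mysqlbinlog supports --start-datetime and --stop-datetime (the timestamps below are examples):
/usr/local/mysql/bin/mysqlbinlog --database=pottytraining \
    --start-datetime="2010-08-20 00:00:00" --stop-datetime="2010-08-27 09:00:00" \
    mysql-bin.000033 mysql-bin.000034 > /opt/pottytraining.sql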
Monitor Mongrel For A Rails Application
#! /bin/bash
# ChetanM chetan.muneshwar@gmail.com
# Add this script to /usr/bin/
# Add */1 * * * * /usr/bin/monome in crontab
# please specify the path of the Rails ROOT, e.g. /home/myapp/ [not /home/myapp/public]
RAILS_ROOT=
cd $RAILS_ROOT/
pii()
{
    PROCNAME=$1
    PIDS=`ps -efa | grep $PROCNAME | grep -v grep | awk '{ print $2 }'`
    for ff in $PIDS
    do
        echo "$ff"
    done
}
mongrel_array=(3078 3079 3080)
for (( i = 0 ; i < ${#mongrel_array[*]}; i++ ))
do
    pii tmp/pids/mongrel.${mongrel_array[i]}.pid > /tmp/dat$$$
    if [ "`cat /tmp/dat$$$`" == "" ] ;then
        rm -rf tmp/pids/mongrel.${mongrel_array[i]}.pid
        mongrel_rails cluster::restart --only ${mongrel_array[i]}
    else
        echo "all good"
    fi
done