How to count the number of emails sent per single account in Exim? - exim

My application needs to count, in real time, the number of emails sent by Exim and the time each was sent. Is that possible?
The connection is made via SMTP.

There are three ways to do that:
1 Parsing logs (worst approach).
2 RSyslog implementation plus Exim conf.
3 Exim with MySQL.
Rsyslog
Install rsyslog and rsyslog-mysql
[root@web ~]# yum install rsyslog rsyslog-mysql
Basic configuration
[root@web ~]# mysql
mysql> CREATE DATABASE Syslog;
mysql> USE Syslog;
mysql> CREATE TABLE `SmtpMailLog` (
`Id` bigint(11) unsigned NOT NULL AUTO_INCREMENT,
`Hostname` varchar(255) NOT NULL,
`EximID` varchar(16) NOT NULL,
`DateIn` datetime DEFAULT NULL,
`DateLastProcessed` datetime DEFAULT NULL,
`DateCompleted` datetime DEFAULT NULL,
`FromAddr` varchar(100) DEFAULT NULL,
`FromAddrHost` varchar(100) DEFAULT NULL,
`FirstToAddr` varchar(100) DEFAULT NULL,
`AdditionalToAddr` text,
`HostFrom` varchar(100) DEFAULT NULL,
`FirstHostTo` varchar(100) DEFAULT NULL,
`Size` int(11) DEFAULT NULL,
`Subject` varchar(255) DEFAULT NULL,
`Notes` varchar(255) DEFAULT NULL,
PRIMARY KEY (`Id`),
UNIQUE KEY `EximID` (`EximID`)
) ENGINE=MyISAM AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COMMENT='--';
mysql> exit
[root@web ~]# echo "USE mysql; CREATE USER rsyslog; FLUSH PRIVILEGES;" | mysql
[root@web ~]# echo "USE mysql; GRANT ALL PRIVILEGES ON Syslog.* TO 'rsyslog'@'127.0.0.1' IDENTIFIED BY 'rsysl0g'; FLUSH PRIVILEGES;" | mysql
[root@web ~]# echo "USE mysql; SET PASSWORD FOR 'rsyslog'@'127.0.0.1' = PASSWORD('rsysl0g'); FLUSH PRIVILEGES;" | mysql
[root@web ~]# /bin/cat << 'EOF' > /etc/rsyslog.conf
# Modules --------------------------------------------------------------------
# Input
$ModLoad imuxsock.so # Unix sockets
# Output
$ModLoad ommysql.so # Log to MySQL
# Globals --------------------------------------------------------------------
# There are many more - see docs
# Files and dirs are created as needed (dirs only for "dynamic" files)
$umask 0000
$DirCreateMode 0640
$FileCreateMode 0640
#$FileOwner rsyslog
#$FileGroup rsyslog
#$DirOwner rsyslog
#$DirGroup rsyslog
$RepeatedMsgReduction on
# Include package specific logs (including rsyslog itself)
$IncludeConfig /etc/rsyslog.d/*.conf
# Log to the console
*.* -/var/log/exim/main.log
& ~
EOF
Parser data Configuration
[root@web ~]# /bin/cat << 'EOF' > /etc/rsyslog.d/20-mail.conf
# ###############################################################
# Mail system logging
# Exim, Spam Assassin, SA-Exim, ClamAV
# /etc/rsyslog.d/20-mail.conf
# ###############################################################
# NOTES
# Careful with quotes in if clauses
# seems to need ' and not " (JG 11 Jun 2009)
# Multi line logging from Exim "detector":
# :msg, regex, " \[[0-9]{1,3}[\\/][0-9]{1,3}\]" ~
# email address finder:
# %msg:R,ERE,0,ZERO:[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}--end%
# Exim ID finder:
# %msg:R,ERE,0,ZERO:[A-Za-z0-9]{6}-[A-Za-z0-9]{6}-[A-Za-z0-9]{2}--end%
# Easier to read log format:
# $template Mail-Exim-File-Format,"%timegenerated:1:10:date-rfc3339% %timegenerated:12:19:date-rfc3339% %hostname% %syslogtag%%msg%\n"
#########################################################
# Syslog style to support OSSEC (JG 26 Aug 2009)
$template Mail-Exim-File-Format,"%timegenerated% %HOSTNAME% %syslogtag%%msg%\n"
#########################################################
# Amalgamated logging templates
# The log entry is built up from an initial entry from ClamAV followed by successive updates from the various components, in the order
# of the templates here. The EximID is used to look up the entry except for SA-Exim (which uses the msgid).
# <= - In
# Local:
# Sep 15 09:06:17 loghost exim[20787]: 1MnT3J-0005PH-2y <= nagios@example.com U=nagios P=local S=794 T="** PROBLEM Service Alert: host-name/NTP-peer is CRITICAL **"
# Sep 22 10:40:59 portal exim[12557]: 1Mq1rn-0003GX-MZ <= root@blueloop.net U=root P=local S=516 T="test message"
# Relayed:
# Sep 15 09:03:38 loghost exim[20078]:
# 1MnT0g-0005Dq-BC <= user@example.com H=host.example.com [192.168.100.100] P=esmtp S=8690192 id=4AAF585B020000AA0004ED5B@port.blueloop.net T="Subject line from message"
# If an arg to CONCAT is NULL then the whole output is NULL
# "<=" (message received): create the amalgamated row for this message,
# keyed on the unique EximID. REPLACE INTO upserts, so repeated log lines
# for the same message do not violate the UNIQUE EximID key.
# NOTE(review): FirstToAddr and FromAddr both extract an email-like token
# from %msg% with nearly identical regexes — verify ordering against real
# Exim "<=" log lines before relying on these two columns.
$template Mail-Exim-In-Amalgamated,"REPLACE INTO SmtpMailLog \
( \
Hostname, \
EximID, \
DateIn, \
DateLastProcessed, \
FirstToAddr, \
FromAddr, \
FromAddrHost, \
AdditionalToAddr, \
HostFrom, \
Size, \
Subject, \
FirstHostTo \
) \
VALUES \
( \
'%hostname%', \
'%msg:R,ERE,0,ZERO:[A-Za-z0-9]{6}-[A-Za-z0-9]{6}-[A-Za-z0-9]{2}--end%', \
'%timereported:::date-mysql%', \
'%timereported:::date-mysql%', \
'%msg:R,ERE,0,ZERO:([A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}$)--end%', \
'%msg:R,ERE,0,ZERO:[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}--end%', \
substring_index('%msg:R,ERE,0,ZERO:[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}--end%', '@', -1), \
'', \
SUBSTRING('%msg:R,ERE,0,ZERO:H=.*\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}]--end%' FROM 3), \
SUBSTRING('%msg:R,ERE,0,ZERO:S=[0-9]{1,}--end%' FROM 3), \
SUBSTRING('%msg:R,ERE,0,ZERO:T=.*--end%' FROM 3), \
'pending' \
) \
",SQL
# ** - Failed
# "**" (delivery failed): mark the existing row as failed and keep the raw
# log message in Notes for diagnosis; the row is looked up by EximID.
$template Mail-Exim-Fail-Amalgamated,"UPDATE SmtpMailLog \
SET \
DateLastProcessed = '%timereported:::date-mysql%', \
FirstToAddr = 'Failed - see notes', \
FirstHostTo = 'Failed - see notes', \
Notes = '%msg%' \
WHERE EximID = '%msg:R,ERE,0,ZERO:[A-Za-z0-9]{6}-[A-Za-z0-9]{6}-[A-Za-z0-9]{2}--end%' \
",SQL
# => - Out
# "=>" (first delivery): record the first recipient address and the
# delivering host, and refresh the last-processed timestamp; the row is
# looked up by EximID.
$template Mail-Exim-Out-Amalgamated, "UPDATE SmtpMailLog \
SET \
FirstToAddr = '%msg:R,ERE,0,ZERO:[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}--end%', \
FirstHostTo = SUBSTRING('%msg:R,ERE,0,ZERO:H=.*]--end%' FROM 3), \
DateLastProcessed = '%timereported:::date-mysql%', \
Notes = 'Out' \
WHERE EximID = '%msg:R,ERE,0,ZERO:[A-Za-z0-9]{6}-[A-Za-z0-9]{6}-[A-Za-z0-9]{2}--end%' \
",SQL
# -> - additional deliveries
# "->" (additional deliveries): append each further recipient address to
# the space-separated AdditionalToAddr list; the row is looked up by EximID.
$template Mail-Exim-Add-Amalgamated, "UPDATE SmtpMailLog \
SET \
AdditionalToAddr = CONCAT_WS(' ',AdditionalToAddr,'%msg:R,ERE,0,ZERO:[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}--end%'), \
DateLastProcessed = '%timereported:::date-mysql%', \
Notes = 'Additional delivery' \
WHERE EximID = '%msg:R,ERE,0,ZERO:[A-Za-z0-9]{6}-[A-Za-z0-9]{6}-[A-Za-z0-9]{2}--end%' \
",SQL
# Completed
# "Completed": stamp the completion time and close out the row; the row is
# looked up by EximID.
$template Mail-Exim-Completed-Amalgamated,"UPDATE SmtpMailLog \
SET \
DateCompleted = '%timereported:::date-mysql%', \
DateLastProcessed = '%timereported:::date-mysql%', \
Notes = 'Completed' \
WHERE EximID = '%msg:R,ERE,0,ZERO:[A-Za-z0-9]{6}-[A-Za-z0-9]{6}-[A-Za-z0-9]{2}--end%' \
",SQL
#########################################################
# Full Exim log (bar the bits that are filtered out above) - file
if $programname == 'exim' then /var/log/exim/main.log;Mail-Exim-File-Format
###################################
# Amalgamated Mail log - single line per mail, some details lost - DB
#if $programname == 'exim' \
# and $msg contains 'dovecot_login' \
#then :ommysql:127.0.0.1,Syslog,rsyslog,rsysl0g;Mail-Exim-New-Amalgamated
if $programname == 'exim' \
and $msg contains '<=' \
then :ommysql:127.0.0.1,Syslog,rsyslog,rsysl0g;Mail-Exim-In-Amalgamated
if $programname == 'exim' \
and $msg contains '=>' \
then :ommysql:127.0.0.1,Syslog,rsyslog,rsysl0g;Mail-Exim-Out-Amalgamated
if $programname == 'exim' \
and $msg contains '->' \
then :ommysql:127.0.0.1,Syslog,rsyslog,rsysl0g;Mail-Exim-Add-Amalgamated
if $programname == 'exim' \
and $msg contains '**' \
then :ommysql:127.0.0.1,Syslog,rsyslog,rsysl0g;Mail-Exim-Fail-Amalgamated
if $programname == 'exim' \
and $msg contains 'Completed' \
then :ommysql:127.0.0.1,Syslog,rsyslog,rsysl0g;Mail-Exim-Completed-Amalgamated
##################################
# Dump Exim messages
if $programname == 'exim' then ~
EOF
Adjust exim log selector:
[root@web ~]# vi /etc/exim/exim.conf
log_selector = +incoming_port +smtp_connection +all_parents +retry_defer +subject +arguments +received_recipients
--
Exim MySQL
Install dependencies.
[root@web ~]# yum install exim-mysql
Add the Exim MySQL connection.
[root@web ~]# vi /etc/exim/exim.conf
hide mysql_servers = 127.0.0.1/{DATABASE}/{USER}/{PASSWORD}
It is possible to use the same table structure as used in the Rsyslog installation.
In the acl_smtp_data section, add something like this:
# In the DATA ACL, insert every message's recipient list into MySQL.
# ${quote_mysql:...} escapes the expanded value for safe embedding in the
# SQL statement (prevents quoting/injection problems from addresses).
acl_smtp_data:
warn
continue = ${lookup mysql{INSERT INTO SmtpMailLog \
(\
AdditionalToAddr \
)\
values \
(\
'${quote_mysql:$recipients}' \
)}}

Related

sqoop import command issue with partition key

I am trying to import data in hive table using sqoop command. The hive table is partitioned by date2 and date is in the format of "9/6/2017 00:00:00". It's throwing error when I use sqoop command to import data using the date column.
Teradata table :
column1, date2, column3
1,9/6/2017 00:00:00, qwe
2,9/20/2017 00:00:00, wer
Sqoop command:
sqoop import \
--connect jdbc:teradata://<server>/database=<db_name> \
--connection-manager org.apache.sqoop.teradata.TeradataConnManager \
--username un \
--password 'pwd' \
--table <tbl_name> \
--where "cast(date2 as Date) > date '2017-09-07' and cast(date2 as Date) < date '2017-09-20'" \
--hive-import --hive-table <db_name>.<tbl_name> \
--hive-partition-key date2 \
-m1
Error
ERROR teradata.TeradataSqoopImportHelper: Exception running Teradata import job
java.lang.IllegalArgumentException:Wrong FS: /usr/tarun/date2=1900-01-01 00%3A00%3A00
When I tried translating your command to multiple lines, it looks like you have missed one \ character, and that is why it is complaining: --hive-import is not ending with "\". The hive table name is also missing from the command.
sqoop import \
--connect jdbc:teradata:///database= \
--connection-manager org.apache.sqoop.teradata.TeradataConnManager \
--username un \
--password 'pwd' \
--table \
--where "cast(date2 as Date) > date '2017-09-07' and cast(date2 as Date) < date '2017-09-20'" \
--hive-import \
--hive-table tarun121 \
--hive-partition-key date2 \
-m1
alternate to this is to try create-hive-table command
sqoop create-hive-table \
--connect jdbc:teradata://localhost:port/schema \
--table hive_tble_name \
--fields-terminated-by ',';
let me know if this solves the issue.

Snakemake: how to change literal tab

I have a rule like :
# Merge the per-lane FASTQ files of a sample and align them with bwa mem,
# producing a sorted BAM. The "\t" in the -R read-group string is left
# unescaped on purpose: it reproduces the bwa "literal <tab>" error this
# question is about (the fix, \\t, is shown in the answer that follows).
rule merge_fastq_by_lane:
    input:
        r1 = get_fastq_r1,
        r2 = get_fastq_r2
    output:
        r1_o = "{sample}/fastq/lanes/{sample}_{unit}_R1.fastq",
        r2_o = "{sample}/fastq/lanes/{sample}_{unit}_R2.fastq",
        bam = "{sample}/bam/lanes/{sample}_{unit}.bam"
    threads:
        1
    message:
        "Merge fastq from the same sample and lane and align using bwa"
    shell:
        """
        cat {input.r1} > {output.r1_o}
        cat {input.r2} > {output.r2_o}
        {bwa} mem -M -t {threads} -R "@RG\tID:{wildcards.sample}_{wildcards.unit}\tSM:{wildcards.sample}" {bwa_index} {output.r1_o} {output.r2_o} | {samtools} view -bS - | {samtools} sort - > {output.bam}
        """
And I have this error message due to tab issues in the -R parameter from bwa
bwa mem -M -t 1 -R "@RG ID:P1_L001 SM:P1" Homo_sapiens.GRCh37.dna.primary_assembly P1/fastq/lanes/P1_L001_R1.fastq P1/fastq/lanes/P1_L001_R2.fastq | samtools view -bS - | samtools sort - > P1/bam/lanes/P1_L001.bam
[E::bwa_set_rg] the read group line contained literal <tab> characters -- replace with escaped tabs: \t
You just have to escape the tab character so that snakemake does not interpret it:
{bwa} mem -M -t {threads} -R "@RG\\tID:{wildcards.sample}_{wildcards.unit}\\tSM:{wildcards.sample}" {bwa_index} {output.r1_o} {output.r2_o} | {samtools} view -bS - | {samtools} sort - > {output.bam}

read and set the value of variable from log file to KSH shell script

I have a log file (which is on output from running a python script)
The log file has the list of variables that I want to pass to a shell script. How do I accomplish this
Example
Log file has the following content. It has the variables x, y, z
Contents of file example.log:
2016-06-07 15:28:12.874 INFO x = (10, 11, 12)
2016-06-07 15:28:12.874 INFO y = case when id =1 then gr8 else ok end
2016-06-07 15:28:12.874 INFO z = 2016-06-07
I want the shell script to read the variables and use in the shell program
Sample shell
shell.ksh
Assign variables
var1 = read_value_x from example.log
var2 = read_value_y from example.log
Is there a generic shell function that I can use to read the log and parse the variable values
Thanks
PMV
Here's how you can do it reasonably efficiently in ksh, for smallish files:
# Read into variables $var1, $var2, ...
# Each log line is split on '=': everything before the first '=' (the
# timestamp, level, and variable name) lands in "unused", the remainder
# of the line lands in "value".
n=0
while IFS='=' read -r unused value; do
# ${value# } strips the single leading space left over from " = ";
# typeset assigns to a dynamically named variable varN without eval.
typeset "var$((++n))=${value# }"
done < example.log
# Enumerate the variables created.
# Equivalent to: `echo "$var1"`, `echo "$var2"`, ...
for (( i = 1; i <= n; ++i)); do
# Indirect expansion via eval; $i is only a loop counter, so this is safe.
eval echo \"\$'var'$i\"
done
Read the log file, use regex to get the value after = on each line and assign to a variable in a loop.
var1=$(awk -F " = " '$1 ~ /[x]$/' < file.log)
var2=$(awk -F " = " '$1 ~ /[y]$/' < file.log)
The awk utility command above will use the delimiter " = " and using regex we check whether $1 is having x or y at the end; if it does, we set the value to relevant variable.
In case you want to set the 2nd part in variable
var1=$(awk -F " = " '$1 ~ /[x]$/{print $2}' < file.log)
var2=$(awk -F " = " '$1 ~ /[y]$/{print $2}' < file.log)

Creating an sql query using awk, bash, grep

I have been trying to parse a Paypal Email and insert the resultant info into a Database of mine. I have most of the code working but I cannot get a Variable to insert into my awk code to create the sql insert query.
if [ -f email-data.txt ]; then {
grep -e "Transaction ID:" -e "Receipt No: " email-data.txt \
>> ../temp
cat ../temp \
| awk 'NR == 1 {printf("%s\t",$NF)} NR == 2 {printf("%s\n",$NF)}' \
>> ../temp1
awk '{print $1}' $email-data.txt \
| grep @ \
| grep -v \( \
| grep -v href \
>> ../address
email_addr=$(cat ../address)
echo $email_addr
cat ../temp1 \
| awk '{print "INSERT INTO users (email,paid,paypal_tran,CCReceipt) VALUES"; print "(\x27"($email_addr)"\x27,'1',\x27"$2"\x27,\x27"$3"\x27);"}' \
> /home/linux014/opt/post-new-member.sql
The output looks like the following
INSERT INTO users (email,paid,paypal_tran,CCReceipt) VALUES('9MU013922L4775929 9MU013922L4775929',1,'9MU013922L4775929','');
Should look like
INSERT INTO users (email,paid,paypal_tran,CCReceipt) VALUES('dogcat@gmail.com',1,'9MU013922L4775929','1234-2345-3456-4567');
(Names changed to protect the innocent)
The trial data I am using is set out below
Apr 18, 2014 10:46:17 GMT-04:00 | Transaction ID: 9MU013922L4775929
You received a payment of $50.00 USD from Dog Cat (dogcat@gmail.com)
Buyer:
Dog Cat
dogcat@gmail.com
Purchase Details
Receipt No: 1234-2345-3456-4567
I cannot figure out why the email-addr is not being inserted properly.
You are calling a shell variable inside awk. The right way to do that is by creating an awk variable using -v option.
For example, say $email is your shell variable, then
... | awk -v awkvar="$email" '{do something with awkvar}' ...
Read this for more details.
However, having said that, here is how I would try and parse the text file:
# Parse the PayPal notification text and emit the INSERT statement.
# The first whitespace-delimited field containing '@' is taken as the
# buyer's email address; q="'" passes a literal single quote into awk
# so the values can be wrapped in SQL quotes without shell-quoting pain.
awk '
/Transaction ID:/ { tran = $NF }
/Receipt No:/ { receipt = $NF }
$1 ~ /@/ { email = $1 }
END {
print "INSERT INTO users (email,paid,paypal_tran,CCReceipt) VALUES";
print "("q email q","1","q tran q","q receipt q");"
}' q="'" data.txt
Output:
INSERT INTO users (email,paid,paypal_tran,CCReceipt) VALUES
('dogcat@gmail.com',1,'9MU013922L4775929','1234-2345-3456-4567');

from string to integer (scripts)

I have this snippet of the code:
set calls = `cut -d" " -f2 ${2} | grep -c "$numbers"`
set messages = `cut -d" " -f2 ${3} | grep -c "$numbers"`
@ popularity = (calls * 3) + messages
and error
@ expression syntax
what does it mean? grep -c returns number, am I wrong, thanks in advance
in $numbers I have list of numbers, 2 and 3 parameters also contain numbers
Try
@ popularity = ($calls * 3) + $messages
The $ symbols are still needed to indicate variables.
See C-shell Cookbook