Extracting subnets and IP addresses from SPF records (e.g. Office365 or Google Apps for Business)

If you want to put your own mail server (Postfix) in front of Office365 or Google Apps for Business for sending/receiving, you have to whitelist the Microsoft/Google mail servers in mynetworks in Postfix.

The script resolves all SPF record includes and generates CIDR maps that can be included in Postfix.

Example:

max@dev1:~$ python get_subnets_of_spf_record_mynetwoks.py
Working on job office365
Working on job google

Two files are generated:

max@dev1:~$ cat /etc/postfix/networks/google 
64.18.0.0/20 OK
64.233.160.0/19 OK
66.102.0.0/20 OK
66.249.80.0/20 OK
72.14.192.0/18 OK
74.125.0.0/16 OK
108.177.8.0/21 OK
173.194.0.0/16 OK
207.126.144.0/20 OK
209.85.128.0/17 OK
216.58.192.0/19 OK
216.239.32.0/19 OK
[2001:4860:4000::]/36 OK
[2404:6800:4000::]/36 OK
[2607:f8b0:4000::]/36 OK
[2800:3f0:4000::]/36 OK
[2a00:1450:4000::]/36 OK
[2c0f:fb50:4000::]/36 OK
172.217.0.0/19 OK
108.177.96.0/19 OK
max@dev1:~/test$ cat /etc/postfix/networks/office365
207.46.101.128/26 OK
207.46.100.0/24 OK
207.46.163.0/24 OK
65.55.169.0/24 OK
157.56.110.0/23 OK
157.55.234.0/24 OK
213.199.154.0/24 OK
213.199.180.0/24 OK
157.56.112.0/24 OK
207.46.51.64/26 OK
157.55.158.0/23 OK
64.4.22.64/26 OK
40.92.0.0/14 OK
40.107.0.0/17 OK
40.107.128.0/17 OK
134.170.140.0/24 OK
[2a01:111:f400::]/48 OK
23.103.128.0/19 OK
23.103.198.0/23 OK
65.55.88.0/24 OK
104.47.0.0/17 OK
23.103.200.0/21 OK
23.103.208.0/21 OK
23.103.191.0/24 OK
216.32.180.0/23 OK
94.245.120.64/26 OK
[2001:489a:2202::]/48 OK

In Postfix they are included in main.cf:

# ----------------------------------------------------------------------
# My Networks
# ----------------------------------------------------------------------
mynetworks =
        cidr:/etc/postfix/networks/local
        cidr:/etc/postfix/networks/other
        cidr:/etc/postfix/networks/google
        cidr:/etc/postfix/networks/office365

Since the records can change from time to time, it is advisable to set up a cron job for this. I use a variant with diff that only patches the map when the result is not empty.
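
A minimal sketch of such a wrapper in Python (the temporary output path and the reload command are assumptions for illustration, not part of the original script): the generator writes to a temporary file, and the wrapper only installs it and reloads Postfix when the content actually differs.

#!/usr/bin/env python

import filecmp
import shutil
import subprocess

# freshly generated map (assumed location) and the map Postfix currently uses
new_file = "/tmp/networks_google.new"
active_file = "/etc/postfix/networks/google"

# compare the content; only replace the active map and reload Postfix if it changed
if not filecmp.cmp(new_file, active_file, shallow=False):
    shutil.copyfile(new_file, active_file)
    subprocess.call(["postfix", "reload"])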

The script can also be adapted for other services:

lookup_spf = {
# Google Apps for Business
"google": {
          "domain": "google.com",
          "file"  : "/etc/postfix/networks/google",
          },

# Office365
"office365": {
          "domain": "spf.protection.outlook.com",
          "file"  : "/etc/postfix/networks/office365",
          },

# Example
"example": {
          "domain": "example.com",
          "file"  : "/etc/postfix/networks/example",
          },

}

Source code:

#!/usr/bin/env python

#
# get_subnets_of_spf_record_mynetwoks.py
# Resolve all known ip addresses from spf record and generate cidr map for postfix
#
# Version 1.0
# Written by Maximilian Thoma (http://www.lanbugs.de)
#
# The generated files can be used in postfix config with for example mynetworks = cidr:/etc/postfix/<generated_file>
#
# This program is free software; you can redistribute it and/or modify it under the terms of the
# GNU General Public License as published by the Free Software Foundation;
# either version 2 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with this program;
# if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, 
# MA 02110, USA
#

#
# Requirements:
# dnspython module  -> pip install dnspython
#

import dns.resolver
from dns.exception import DNSException
import re
import sys

# Look for DNS Record at:
#
# "jobname": {
#            "domain": "domainname",
#            "file": "output_file",
#            }
#


lookup_spf = {
# Google Apps for Business
"google": {
          "domain": "google.com",
          "file"  : "/etc/postfix/networks/google",
          },

# Office365
"office365": {
          "domain": "spf.protection.outlook.com",
          "file"  : "/etc/postfix/networks/office365",
          },
}

############################################################################################

def getspf(record, filehandler):
    # Init Resolver
    myResolver = dns.resolver.Resolver()

    try:
        # Try to lookup TXT record
        myAnswer = myResolver.query(record,"TXT")

    except DNSException:
        sys.stderr.write("Failed to query record, SPF broken.")
        return

    results = []

    for rdata in myAnswer:
        # Get string out of records
        for txt_string in rdata.strings:
            # Append to SPF Records buffer if "spf" in string
            if "spf" in txt_string:
                results.append(txt_string)

    # If results >=1
    if len(results) >= 1:
        # Work on records
        for spf in results:
            # Split parts
            parts = spf.split(" ")
            # Check parts
            for part in parts:

                s_include = re.match(r"^include:(?P<domain>.*)$", part)
                s_ip4 = re.match(r"^ip4:(?P<ip4>.*)$", part)
                s_ip6 = re.match(r"^ip6:(?P<ip6>.*)$", part)

                # If in part "include" found, next round
                if s_include:
                    getspf(s_include.group('domain'), filehandler)
                # elif ip4 found
                elif s_ip4:
                    filehandler.write(s_ip4.group('ip4') + " OK\n")
                # elif ip6 found
                elif s_ip6:
                    filehandler.write("[" + s_ip6.group('ip6').replace("/","]/") + " OK\n")
                # else no valid record
                else:
                    pass
    # no results 
    else:
        sys.stderr.write("No results")
        pass

def main():
    # Working on jobs
    for jobname, config in lookup_spf.iteritems():

        print "Working on job %s" % jobname

        # open file
        filehandler = open(config['file'], 'w')
        # start query spf records
        getspf(config['domain'], filehandler)
        # close file
        filehandler.close()



if __name__ == "__main__":
    main()

 

Python: Snippet – Executing/importing Python code from text files

Check_MK stores all of its data in plain files directly as executable Python code.

To reuse the stored dictionaries etc. in your own scripts, you can load them with eval() or exec().

eval() can be used to load e.g. a dictionary into a variable; exec() can also load whole functions etc.

Example eval():

dict.txt

{"foo":"bar","aaa":"bbb"}

import_dict.py

#!/usr/bin/env python

with open("dict1.txt","r") as f:
    x = eval(f.read().replace("\n",""))

print x

print x['foo']

Result:

max@cmkdevel:~/dev$ python import_dict.py
{'foo': 'bar', 'aaa': 'bbb'}
bar
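
If the file contains only literal data structures like the dictionary above, ast.literal_eval() from the standard library is a safer alternative to eval(), since it refuses to execute arbitrary code; a minimal sketch (not part of the original post):

#!/usr/bin/env python

import ast

# safely evaluate the literal dictionary stored in dict.txt
with open("dict.txt", "r") as f:
    x = ast.literal_eval(f.read())

print x
print x['foo']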

Example exec():

code.txt

max['foo'] = {
             "foo": "bar",
             "fxx": "boo",
             }


def hello(name):
    print "Hallo " + name

import_code.py

#!/usr/bin/env python

max = {}

with open("code.txt","r") as f:
    exec(f.read())


print max
print max['foo']
print max['foo']['foo']
hello("max")

Result:

max@cmkdevel:~/dev$ python import_code.py 
{'foo': {'foo': 'bar', 'fxx': 'boo'}}
{'foo': 'bar', 'fxx': 'boo'}
bar
Hallo max

 

 

Python: Snippet – Saving and reusing objects

The pickle module ("to pickle", i.e. to preserve) provides functions for saving objects. The saved objects can be restored later. The data is stored as a byte stream.

The following data types are supported:

- None, True and False
- integers, floating point numbers, complex numbers
- strings
- tuples, lists, sets and dictionaries containing only picklable objects
- functions and classes defined at the top level of a module
- instances of classes whose __dict__ is picklable

Here is a small example:

dict_to_file.py

#!/usr/bin/env python

import pickle

t = {
"sname": "Foo",
"lname": "Bar",
"street": "Foo Street",
"city": "Bar City"
}

with open("test.pkl","wb") as f:
    pickle.dump(t, f, pickle.HIGHEST_PROTOCOL)

file_to_dict.py

#!/usr/bin/env python

import pickle
import pprint

with open("test.pkl","rb") as f:
    t = pickle.load(f)

pprint.pprint(t)

Output of file_to_dict.py

max@cmkdevel:~$ python file_to_dict.py 
{'city': 'Bar City', 'lname': 'Bar', 'sname': 'Foo', 'street': 'Foo Street'}

 

Python: Snippet – Search and replace in files

The title of the post says it all 😉

Python 3:

#!/usr/bin/env python3

import fileinput
import re

file = fileinput.FileInput("/etc/ssh/sshd_config", inplace=True, backup=".bak")

for line in file:
    line = re.sub(r".*Banner.*","Banner /etc/issue.net", line)
    print(line, end='')

file.close()

Python 2:

#!/usr/bin/env python

import fileinput
import re
import sys

file = fileinput.FileInput("/etc/ssh/sshd_config", inplace=True, backup=".bak")

for line in file:
    line = re.sub(r".*Banner.*","Banner /etc/issue.net", line)
    sys.stdout.write(line)

file.close()

 

Python: Snippet – Threading with result

Code-Snippet:

#!/usr/bin/env python

import socket
from multiprocessing.pool import ThreadPool
import pprint


jobs = ("www.heise.de","www.google.com","www.golem.de","www.google.de","www.lanbugs.de","www.microsoft.com")

def worker(domain):
    # resolve only once so the printed and the returned address are identical
    ip = socket.gethostbyname(domain)
    print ip
    return ip

pool = ThreadPool(processes=3)

result_buffer = {}

for d in jobs:
    print "start " + d
    async_result = pool.apply_async(worker, args=(d,))
    result_buffer[d]=async_result.get()


pprint.pprint(result_buffer)


Output:

>python thread_with_result.py 
start www.heise.de
193.99.144.85
start www.google.com
172.217.20.68
start www.golem.de
109.68.230.138
start www.google.de
172.217.20.99
start www.lanbugs.de
81.169.181.94
start www.microsoft.com
104.108.168.41
{'www.golem.de': '109.68.230.138',
 'www.google.com': '216.58.207.68',
 'www.google.de': '172.217.20.99',
 'www.heise.de': '193.99.144.85',
 'www.lanbugs.de': '81.169.181.94',
 'www.microsoft.com': '104.108.168.41'}
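
In the snippet above, get() is called directly after apply_async(), so each lookup is waited for before the next one is submitted and the jobs effectively run one after another. A minimal sketch (not the original code) that submits all jobs first and collects the results afterwards, so the lookups actually run in parallel:

#!/usr/bin/env python

import socket
from multiprocessing.pool import ThreadPool
import pprint

jobs = ("www.heise.de", "www.google.com", "www.golem.de")

def worker(domain):
    return socket.gethostbyname(domain)

pool = ThreadPool(processes=3)

# submit all jobs first ...
async_results = dict((d, pool.apply_async(worker, args=(d,))) for d in jobs)

# ... then collect the results; get() blocks until the corresponding job is done
result_buffer = dict((d, r.get()) for d, r in async_results.iteritems())

pprint.pprint(result_buffer)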

 

Python: Oracle DB module for Python on CentOS 6

Source: https://gist.github.com/hangtwenty/5547377

#!/bin/bash

# INSTALL ORACLE INSTANT CLIENT #
#################################

# NOTE: Oracle requires at least 1176 MB of swap (or something around there).
# If you are using CentOS in a VMWare VM, there's a good chance that you don't have enough by default.
# If this describes you and you need to add more swap, see the
# "Adding a Swap File to a CentOS System" section, here:
# http://www.techotopia.com/index.php/Adding_and_Managing_CentOS_Swap_Space

# Install basic dependencies
sudo yum -y install libaio bc flex

echo "Now go get some the following two RPMs ..."
echo "- basic: oracle-instantclient11.2-basic-11.2.0.3.0-1.x86_64.rpm"
echo "- SDK/devel: oracle-instantclient11.2-devel-11.2.0.3.0-1.x86_64.rpm"
echo "... from this URL: http://www.oracle.com/technetwork/topics/linuxx86-64soft-092277.html"
echo "WARNING: It's pretty annoying, they make you sign up for an Oracle account, etc."
echo 'I will assume you have put these two files into ~/Downloads'
echo "Press any key once you're ready" && read -n 1 -s

sudo rpm -ivh ~/Downloads/oracle-instantclient11.2-basic-*
sudo rpm -ivh ~/Downloads/oracle-instantclient11.2-devel-*

# SET ENVIRONMENT VARIABLES #
#############################

# Source for this section: http://cx-oracle.sourceforge.net/BUILD.txt

# (SIDENOTE: I had to alter it by doing some digging around for where the Oracle RPMs really installed to;
# if you ever need to do this, do a command like this:
#     rpm -qlp <rpm_file_of_concern.rpm>)

echo '# Convoluted undocumented Oracle bullshit.' >> $HOME/.bashrc
echo 'export ORACLE_VERSION="11.2"' >> $HOME/.bashrc
echo 'export ORACLE_HOME="/usr/lib/oracle/$ORACLE_VERSION/client64/"' >> $HOME/.bashrc
echo 'export PATH=$PATH:"$ORACLE_HOME/bin"' >> $HOME/.bashrc
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:"$ORACLE_HOME/lib"' >> $HOME/.bashrc
. $HOME/.bashrc

# INSTALL cx_Oracle #
#####################

pip install cx_Oracle

Good guide to using cx_Oracle: http://www.oracle.com/technetwork/articles/dsl/prez-python-queries-101587.html

Example:

import cx_Oracle

# DB helper: return result rows as a list of dicts keyed by column name
def rows_to_dict_list(cursor):
    columns = [i[0] for i in cursor.description]
    return [dict(zip(columns, row)) for row in cursor]

# Connect to DB
dsn_tns = cx_Oracle.makedsn("10.10.10.1",1521,"TESTDB")
db = cx_Oracle.connect("testuser","password",dsn_tns)
cursor = db.cursor()

# Get data from DB
cursor.execute("SELECT * FROM test_tab")
result = rows_to_dict_list(cursor)

# Insert to DB
cursor.execute("INSERT INTO test_tab (row1, row2, row3) VALUES ('xxx', 'yyy', 'zzz')")
db.commit()

# close db
db.close()
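
cx_Oracle also supports bind variables, which avoids quoting and escaping problems in the SQL string; a minimal sketch assuming the same test_tab table and connection details as above:

import cx_Oracle

dsn_tns = cx_Oracle.makedsn("10.10.10.1", 1521, "TESTDB")
db = cx_Oracle.connect("testuser", "password", dsn_tns)
cursor = db.cursor()

# positional bind variables (:1, :2, :3) instead of literal values in the statement
cursor.execute("INSERT INTO test_tab (row1, row2, row3) VALUES (:1, :2, :3)",
               ("xxx", "yyy", "zzz"))
db.commit()
db.close()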

 

Python: Snippet – Arguments for command line tools with getopt or argparse

My personal favorite is argparse, but for the sake of completeness here are both solutions. getopt and argparse both ship with Python and do not have to be installed separately.

GETOPT approach:

#!/usr/bin/env python

import getopt
import sys

def usage():
    print "test1.py - A test script.\n" \
          " -p, --print Return a string \n" \
          " -h, --help Help"


def main():
    try:
        opts, args = getopt.getopt(sys.argv[1:], "p:h", ['print=', 'help'])

    except getopt.GetoptError as err:
        print str(err)
        sys.exit(2)

    for o, a in opts:
        if o in ('-p', '--print'):
            string_to_print = a

        if o in ('-h', '--help'):
            usage()
            sys.exit(2)

    if not 'string_to_print' in locals():
        print "-p or --print is not given or string is missing\n"
        usage()
        sys.exit(2)

    print string_to_print


if __name__ == "__main__":
    main()

ARGPARSE approach:

#!/usr/bin/env python

import argparse

def main():
        parser = argparse.ArgumentParser(description="test3.py - A test script.")
        parser.add_argument('-p','--print',dest='string_to_print', required=True, help="String to print")
        args = parser.parse_args()

        print args.string_to_print

if __name__ == "__main__":
        main()

 

Python: Snippet Multiprocessing

If jobs can be parallelized, you can use multiprocessing in Python.

#!/usr/bin/env python

import os
from multiprocessing import Pool



def worker(job):
    x, y = job

    result = x ** y

    if hasattr(os, 'getppid'):
        print "parent process pid:", os.getppid()
    print "process pid:", os.getpid()

    print "result is: ", result
    print "---"


if __name__ == '__main__':
    jobs = [(1, 2), (3, 4), (5, 6), (11, 12), (13, 14), (15, 16), (21, 22), (23, 24), (25, 26)]
    pool = Pool(processes=5)

    for job in jobs:
        pool.apply_async(worker, args=(job,))

    pool.close()
    pool.join()

Result:

max@cmkdev:~$ python mp.py 
parent process pid: 19599
process pid: 19600
result is:  1
---
parent process pid: 19599
process pid: 19601
result is:  81
---
parent process pid: 19599
process pid: 19602
result is:  15625
---
parent process pid: 19599
process pid: 19602
result is:  3138428376721
---
parent process pid: 19599
process pid: 19600
result is:  6568408355712890625
---
parent process pid: 19599
process pid: 19600
result is:  122694327386105632949003612841
---
parent process pid: 19599
process pid: 19600
result is:  480250763996501976790165756943041
---
parent process pid: 19599
process pid: 19602
result is:  2220446049250313080847263336181640625
---
parent process pid: 19599
process pid: 19604
result is:  3937376385699289
---
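
If the results are needed in the parent process, Pool.map() is a compact alternative to apply_async(); a minimal sketch (not part of the original snippet) with a worker that returns its result instead of printing it:

#!/usr/bin/env python

from multiprocessing import Pool

def power(job):
    x, y = job
    return x ** y

if __name__ == '__main__':
    jobs = [(1, 2), (3, 4), (5, 6), (11, 12)]
    pool = Pool(processes=5)

    # map() distributes the jobs over the worker processes and
    # returns the results in the same order as the jobs
    results = pool.map(power, jobs)

    pool.close()
    pool.join()

    print results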

 

Python: Snippet – Searching a file and returning line numbers

test.txt in which foobar is searched for:

wer
w
erw
erwer
foobar
sfsdfhsdkjfhkjsdf
sdf
sdf
sdf
sdf
sdf
sdflskdjflsdjflksjflksjf
sdfkjsdfjkhskjhffoobardjskfhskdjhfkjsdhfkjshdf
sflksdjfjklsdfjs
dfs
dfs
df
sdf
sdf
dsf

Test script for the search:

#!/usr/bin/env python

filename = 'test.txt'
search = 'foobar'

with open(filename) as f:
    for num, line in enumerate(f, 1):
        if search in line:
            print '%s - found at line:' % search, num



Result:

dev1@cmkdev1:/home/dev1$ python test.py 
foobar - found at line: 5
foobar - found at line: 13

 

Python: Cisco config grabber – Finding interface configs & generating new config

The script snippet is used to search the backup folders (e.g. from 20.09.2016) for interfaces with a certain description and a certain parameter set (e.g. service-policy), and to automatically generate configuration that removes that config again.

Example Cisco config (swt70/20.09.2016/swt70-config.txt):

interface GigabitEthernet1/0/47
 description acp10 
 switchport access vlan 17
 switchport mode access
 switchport nonegotiate
 spanning-tree guard loop
 service-policy input ACCESSPOINT
!
interface GigabitEthernet1/0/48
 description acp43 
 switchport access vlan 17
 switchport mode access
 switchport nonegotiate
 spanning-tree guard loop
 service-policy input ACCESSPOINT
!
The script requires CiscoConfParse; it can be installed with "pip install ciscoconfparse".

#!/usr/bin/python

import os
from ciscoconfparse import CiscoConfParse
import re

buffer = []
buffer_f = []

# Folder structure: <switchname>/<backup_date>/config.txt
for dir in os.listdir("."):
        buffer.append("./"+dir+"/20.09.2016/")

# Collect the files
for x in buffer:
        try:
                for f in os.listdir(x):
                        buffer_f.append(x+f)
        except:
                pass


# Process all files
for f in buffer_f:
  # load parser
  parse = CiscoConfParse(f)
  
  # find the interfaces in the config
  all_intfs = parse.find_objects(r"^interf")
  intfs = list()
  fault_intfs = list()
  # search the interfaces for description .+acp.+
  for obj in all_intfs:
    if obj.re_search_children(r"description.+acp.+|description.+ACP.+"):
      intfs.append(obj)
  
  # for every match on description .+acp.+ search for a service-policy
  for i in intfs:
    
    if i.re_search_children(r"service-policy.+"):
      sp = i.re_search_children(r"service-policy.+")
      
      fault_intfs.append((i, sp[0].text) )
  
  # if there is at least one entry in the fault buffer
  if len(fault_intfs) > 0:
    
    # extract the switch name from the file name
    n = re.search(r"^.+(swt[0-9][0-9]).+|^.+(SWT[0-9][0-9]).+",f)
    if n.group(1) is None:
      print n.group(2)
    else:
      print n.group(1)
    
    # generate the "no" statements
    for i, no_cmd in fault_intfs:
      print "!"
      print i.text
      print "no "+no_cmd
      
    print "! ---"
  else:
    pass

The script generates the following output:

swt70
!
interface GigabitEthernet1/0/47
no  service-policy input ACCESSPOINT
!
interface GigabitEthernet1/0/48
no  service-policy input ACCESSPOINT
! ---

 

Python: Simple IMAP client

The heading says it all. The code snippet is used to fetch mails from an IMAP server and do things with the payload.

#!/usr/bin/env python

import imaplib
import sys
import email
import re
import email.header
import base64
from HTMLParser import HTMLParser

username = "blah@blah.de"
password = "xxxxxx"

imap_rz1 = "imap.xxx.de"
imap_rz2 = "imap-rz2.xxxxx.de"


class MLStripper(HTMLParser):
    def __init__(self):
        self.reset()
        self.fed = []
    def handle_data(self, d):
        self.fed.append(d)
    def get_data(self):
        return ''.join(self.fed)

def strip_tags(html):
    s = MLStripper()
    s.feed(html)
    return s.get_data()




# Try primary server
try:
    M = imaplib.IMAP4(imap_rz1)

except:

    sys.stderr.write("Try second server, primary not reachable.\n")
    # Try secondary server
    try:
        M = imaplib.IMAP4(imap_rz2)

    except:
        sys.stderr.write("Error no connection possible.\n")
        sys.exit(1)

# Try to Login
try:
    M.login(username, password)

except:
    sys.stderr.write("Error login.\n")
    sys.exit(1)

# DATA
M.select()
typ, data = M.search(None, 'ALL')

for num in data[0].split():
    typ, data = M.fetch(num, '(RFC822)')

    #### print 'Message %s\n%s\n' % (num, data[0][1])
    msg = email.message_from_string(data[0][1])
    hdr = email.header.make_header(email.header.decode_header(msg['Subject']))

    if msg.is_multipart():
        print "Multipart Mail"
        for payload in msg.get_payload():
            print payload.get_payload()
    else:

        enc = msg['Content-Transfer-Encoding']

        if enc == "base64":
            dirty = msg.get_payload()
            payload = base64.decodestring(dirty)
            print payload
            #print strip_tags(payload)
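            # parse_mail() is not defined in this snippet; it is assumed to be a
            # user-supplied function that further processes the stripped mail body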
            pmail = parse_mail(mailcontent=strip_tags(payload).rstrip().lstrip().splitlines())
        else:
            print msg.get_payload()
            #print strip_tags(msg.get_payload())
            pmail = parse_mail(mailcontent=strip_tags(msg.get_payload()).rstrip().lstrip().splitlines())
            
    # Set to deleted
    M.store(num, '+FLAGS', '\\Deleted')

# Delete permanently
M.expunge()
# Close current mailbox
M.close()
# Logout
M.logout()

Python version of getadsmtp.pl

Translation of the AD mail address collection script from Perl to Python.

#!/usr/bin/python

# getadsmtp.py
# Version 1.0
# This script is a translation of the original Perl script getadsmtp.pl

# This script will pull all users' SMTP addresses from your Active Directory
# (including primary and secondary email addresses) and list them in the
# format "user@example.com OK" which Postfix uses with relay_recipient_maps.
# Be sure to double-check the path to python above.

# This requires python-ldap to be installed.  To install python-ldap on debian based systems,
# at a shell type "apt-get install python-ldap" or "sudo apt-get install python-ldap"

import os, sys, ldap

# Enter the path/file for the output
valid_addresses = "/etc/postfix/example_recipients"

# Enter the FQDN of your Active Directory domain controllers below
dc1="dc01.example.com"
dc2="dc02.example.com"

# Enter the LDAP container for your userbase.
# The syntax is CN=Users,dc=example,dc=com
# This can be found by installing the Windows 2000 Support Tools
# then running ADSI Edit.
# In ADSI Edit, expand the "Domain NC [domaincontroller1.example.com]" &
# you will see, for example, DC=example,DC=com (this is your base).
# The Users Container will be specified in the right pane as
# CN=Users depending on your schema (this is your container).
# You can double-check this by clicking "Properties" of your user
# folder in ADSI Edit and examining the "Path" value, such as:
# LDAP://domaincontroller1.example.com/CN=Users,DC=example,DC=com
# which would be hqbase="cn=Users,dc=example,dc=com"
# Note:  You can also use just hqbase="dc=example,dc=com"
hqbase="cn=Users,dc=example,dc=com"

# Enter the username & password for a valid user in your Active Directory
# with username in the form cn=username,cn=Users,dc=example,dc=com
# Make sure the user's password does not expire.  Note that this user
# does not require any special privileges.
# You can double-check this by clicking "Properties" of your user in
# ADSI Edit and examining the "Path" value, such as:
# LDAP://domaincontroller1.example.com/CN=user,CN=Users,DC=example,DC=com
# which would be $user="cn=user,cn=Users,dc=example,dc=com"
# Note: You can also use the UPN login: "user@example.com"
user="cn=user,cn=Users,dc=example,dc=com"
passwd="password"

try:
  l = ldap.initialize("ldap://%s" %(dc1))
  l.set_option(ldap.OPT_REFERRALS, 0)
  l.protocol_version = 3
  l.simple_bind_s(user, passwd)

except ldap.LDAPError, e:
  try:
    l = ldap.initialize("ldap://%s" %(dc2))
    l.set_option(ldap.OPT_REFERRALS, 0)
    l.protocol_version = 3
    l.simple_bind_s(user, passwd)

  except ldap.LDAPError, e:
    print "Error connecting to specified domain controllers\n"
    sys.exit()

# Play around with this to grab objects such as Contacts, Public Folders, etc.
# A minimal filter for just users with email would be:
# filter = "(&amp;(sAMAccountName=*)(mail=*))"
filter = "(&amp; (mailnickname=*) (| (&amp;(objectCategory=person)(objectClass=user)(!(homeMDB=*))(!(msExchHomeServerName=*)))(&amp;(objectCategory=person)(objectClass=user)(|(homeMDB=*)(msExchHomeServerName=*)))(&amp;(objectCategory=person)(objectClass=contact))(objectCategory=group)(objectCategory=publicFolder)(objectClass=msExchDynamicDistributionList) ))"

attrs = ["proxyAddresses"]
scope = ldap.SCOPE_SUBTREE

r = l.search(hqbase, scope, filter, attrs)
type,a = l.result(r)
result_set = []

for x in a:
  name,attrs = x
  if hasattr(attrs, 'has_key') and attrs.has_key('proxyAddresses'):
    proxyAddresses = attrs['proxyAddresses']
    for y in proxyAddresses:
      result_set.append("%s OK" %(y.replace("smtp:","").replace("SMTP:","")))

# Add additional restrictions, users, etc. to the output file below.
#result_set.append("user@example.com OK")
#result_set.append("user1@example.com 550 User unknown.")
#result_set.append("bad.example.com 550 User does not exist.")

#######################################################################
# Build file ...
output = file(valid_addresses,'w')

for line in result_set:
  output.write("%s\n" %(line))

output.close()