initial commit

This commit is contained in:
Shane Peters
2018-11-10 13:26:49 -05:00
parent 57cbadbe8a
commit 525ecff602
21 changed files with 3049 additions and 0 deletions

104
.gitignore vendored Normal file
View File

@@ -0,0 +1,104 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
# Translations
*.mo
*.pot
# Django stuff:
*.log
.static_storage/
.media/
local_settings.py
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/

26
README Normal file
View File

@@ -0,0 +1,26 @@
Configure Kafka topics (run on one kafka node)
doc/kafka_topics.sh
Initialize elasticsearch:
curl -X PUT 'http://<elasticsearch>:9200/threatline' -d@doc/es_mapping.json
Install service file (FreeBSD):
cp doc/threatline /usr/local/etc/rc.d/threatline
Enable threatline:
sysrc threatline_enable=YES
sysrc threatline_agents="normalize enrich check archive"
Start threatline:
service threatline start
Monitor logs:
tail -f /var/log/<stage>_worker.log
Stages:
Normalize: Touch-up/rename fields, etc.
Enrich: Enriches parts of the message (whois, MAC vendor).
Check: Checks parts of message (now enriched) against known bad stuff.
Archive: Push document into elasticsearch. Can also log to file.

BIN
doc/APACHE_KAFKA.tgz Normal file

Binary file not shown.

2117
doc/es_mapping.json Normal file

File diff suppressed because it is too large Load Diff

11
doc/kafka_topics.sh Normal file
View File

@@ -0,0 +1,11 @@
#!/bin/sh
# Create the Kafka topics used by the ThreatLine pipeline:
#   TLINGEST-<client>  raw Bro logs per client
#   TLNORMALIZED / TLENRICHED / TLARCHIVE / TLALERTS  inter-stage topics
# Run once on any Kafka node.
KBIN=/usr/local/share/java/kafka/bin
ZK=10.15.0.38:2181

for topic in TLINGEST-lodi TLINGEST-wyomic TLINGEST-qwf \
             TLNORMALIZED TLENRICHED TLALERTS TLARCHIVE; do
    ${KBIN}/kafka-topics.sh --zookeeper ${ZK} --create --topic ${topic} \
        --partitions=3 --replication-factor=1
done

101
doc/setup_pfsense.sh Normal file
View File

@@ -0,0 +1,101 @@
# FreeBSD 11.1-RELEASE
# Provisioning notes for building Bro + the Kafka output plugin and
# installing them on a pfSense box.
# Install dependencies
pkg install -y bash git flex bison cmake libpcap librdkafka python py27-sqlite3 caf swig
# Compile Bro (no install)
# Needs compiled because build/src/bifcl is needed to compile plugins
mkdir /usr/local/src; cd /usr/local/src/
git clone https://github.com/bro/bro
cd bro; ./configure && make -j2
# Compile kafka plugin (no install)
# This will generate APACHE_KAFKA.tgz
cd /usr/local/src/
git clone https://github.com/apache/metron-bro-plugin-kafka.git
# Fix: configure/make must run inside the plugin checkout, not /usr/local/src
cd metron-bro-plugin-kafka
./configure --bro-dist=/usr/local/src/bro
make
# Copy APACHE_KAFKA.tgz to pfsense
# Login into pfsense and enable FreeBSD repos (temporarily)
sed -i '' 's/FreeBSD: { enabled: no/FreeBSD: { enabled: yes/g' /usr/local/share/pfSense/pkg/repos/pfSense-repo.conf
sed -i '' 's/FreeBSD: { enabled: no/FreeBSD: { enabled: yes/g' /usr/local/etc/pkg/repos/FreeBSD.conf
pkg install -y bro librdkafka
sed -i '' 's/FreeBSD: { enabled: yes/FreeBSD: { enabled: no/g' /usr/local/share/pfSense/pkg/repos/pfSense-repo.conf
sed -i '' 's/FreeBSD: { enabled: yes/FreeBSD: { enabled: no/g' /usr/local/etc/pkg/repos/FreeBSD.conf
pkg update
# Extract plugin and enable it
tar xzf APACHE_KAFKA.tgz -C /usr/local/lib/bro/plugins
cat > /usr/local/share/bro/site/local.bro <<EOF
@load misc/loaded-scripts
@load tuning/defaults
@load misc/capture-loss
@load misc/stats
@load misc/scan
@load frameworks/software/vulnerable
@load frameworks/software/version-changes
@load-sigs frameworks/signatures/detect-windows-shells
@load protocols/ftp/software
@load protocols/smtp/software
@load protocols/ssh/software
@load protocols/http/software
@load protocols/ftp/detect
@load protocols/conn/known-hosts
@load protocols/conn/known-services
@load protocols/ssl/known-certs
@load protocols/ssl/validate-certs
@load protocols/ssl/log-hostcerts-only
@load protocols/ssh/geo-data
@load protocols/ssh/detect-bruteforcing
@load protocols/ssh/interesting-hostnames
@load frameworks/files/hash-all-files
@load frameworks/files/detect-MHR
@load policy/protocols/conn/vlan-logging
@load policy/protocols/conn/mac-logging
@load policy/protocols/smb
@load Apache/Kafka/logs-to-kafka.bro
redef Kafka::topic_name = "TLINGEST-CLIENTNAME";
redef Kafka::tag_json = T;
redef Kafka::logs_to_send = set(CaptureLoss::LOG, PacketFilter::LOG, Stats::LOG, Conn::LOG, DHCP::LOG, DNS::LOG, FTP::LOG, HTTP::LOG, IRC::LOG, KRB::LOG, NTLM::LOG, RADIUS::LOG, RDP::LOG, SIP::LOG, SMB::CMD_LOG, SMB::FILES_LOG, SMB::MAPPING_LOG, SMTP::LOG, SNMP::LOG, SOCKS::LOG, SSH::LOG, SSL::LOG, Syslog::LOG, Tunnel::LOG, Files::LOG, PE::LOG, X509::LOG, Intel::LOG, Notice::LOG, Software::LOG, Weird::LOG, CaptureLoss::LOG);
redef Kafka::kafka_conf = table(["client.id"] = "setme.client"
    , ["compression.codec"] = "lz4"
    , ["request.required.acks"] = "0"
    , ["metadata.broker.list"] = "10.15.0.40:9092,10.15.0.41:9092,10.15.0.42:9092"
    );
EOF
cat > /usr/local/etc/node.cfg <<EOF
[logger]
type=logger
host=localhost
[manager]
type=manager
host=localhost
[proxy-1]
type=proxy
host=localhost
[worker-1]
type=worker
host=localhost
interface=igb1
EOF
cat > /usr/local/etc/broctl.cfg <<EOF
MailTo = root@localhost
MailConnectionSummary = 0
MinDiskSpace = 0
MailHostUpDown = 0
LogRotationInterval = 3600
LogExpireInterval = 0
StatsLogEnable = 1
StatsLogExpireInterval = 1
StatusCmdShowAll = 0
CrashExpireInterval = 1
SitePolicyScripts = local.bro
LogDir = /usr/local/logs
SpoolDir = /usr/local/spool
CfgDir = /usr/local/etc
EOF
broctl check

51
doc/threatline Normal file
View File

@@ -0,0 +1,51 @@
# PROVIDE: threatline
# REQUIRE: LOGIN
# KEYWORD: shutdown

# FreeBSD rc.d service script: runs threatline.py worker stages under daemon(8).
#
# Add the following lines to /etc/rc.conf to enable threatline:
#
# threatline_enable="YES"
#
#

. /etc/rc.subr

name="threatline"
rcvar=threatline_enable

# read configuration and set defaults
load_rc_config "$name"
: ${threatline_enable="NO"}
# Stages to launch; passed straight through as argv to threatline.py
: ${threatline_agents="normalize enrich check archive"}
: ${threatline_pidfile="/tmp/threatline.pid"}
: ${threatline_path="/usr/local/threatline/threatline/threatline.py"}

start_cmd="threatline_start"
stop_cmd="threatline_stop"
# daemon(8) detaches the worker and records its pid in the pidfile
daemon_head=/usr/sbin/daemon
python_path=/usr/local/bin/python2.7

# Start the pipeline under daemon(8) when threatline_enable is YES.
threatline_start()
{
    if checkyesno ${rcvar}; then
        echo "* starting threatline... "
        $daemon_head -p $threatline_pidfile $python_path $threatline_path $threatline_agents
    fi
}

# Stop every process whose command line matches 'threatline'.
# NOTE(review): kills by ps/awk pattern rather than by the recorded pidfile,
# so it may match unrelated processes containing 'threatline' -- confirm.
threatline_stop()
{
    if checkyesno ${rcvar}; then
        echo "* stopping threatline... "
        #pkill python
        kill `ps ax | awk '/threatline/{print $1}'` 2>/dev/null
    fi
}

# NOTE(review): defined but never wired via restart_cmd, so rc.subr's
# default restart (stop then start) is what actually runs -- confirm intended.
threatline_restart()
{
    threatline_stop
    threatline_start
}

run_rc_command "$1"

7
requirements.txt Normal file
View File

@@ -0,0 +1,7 @@
elasticsearch
kafka-python
lz4
ipwhois
ipaddress
requests
netaddr

11
setup.py Normal file
View File

@@ -0,0 +1,11 @@
#!/usr/bin/env python
"""Packaging script for the ThreatLine pipeline."""
from distutils.core import setup

setup(
    name='ThreatLine',
    version='0.1',
    packages=['threatline'],
    license='Creative Commons Attribution-Noncommercial-Share Alike license',
    # The repository ships a file named README (no extension); the original
    # open('README.txt') raised IOError at build time.
    long_description=open('README').read(),
)

View File

View File

@@ -0,0 +1,100 @@
#!/usr/bin/env python
# Archiving handler
import sys
from base import BaseHandler
from utils.archive_utils import ElasticLogger
class Archive(BaseHandler):
    """Archive stage: final pipeline step.

    Consumes from TLARCHIVE and pushes every message into the archiver
    (Elasticsearch).  All supported Bro log types are archived the same
    way, so the handle_<type> methods are generated from _ARCHIVED_TYPES
    below instead of being written out 26 times by hand.
    """

    def __init__(self):
        super(Archive, self).__init__()
        # BaseHandler.initialize() scans this class for handle_* methods
        # to build the dispatch table.
        self.mymod = sys.modules[__name__].Archive
        self.settings = {
            'consumer_topic': 'TLARCHIVE',
            'consumer_group': 'archive_group',
            'producer_topic': 'TLDONE',
            'producer_client_id': 'archiver-',
            'dispatchers': {}  # will be generated at initialize()
        }
        # self.archiver = FileLogger('threatline.log')
        self.archiver = ElasticLogger()


def _archive_message(self, message):
    """Shared handler body: push the message to the archiver."""
    self.archiver.send(message)


# Log types this stage archives; one handle_<type> method is installed
# per entry (same set the original spelled out as 26 identical methods).
_ARCHIVED_TYPES = (
    'conn', 'dhcp', 'dce_rpc', 'known_devices', 'dns', 'files', 'http',
    'notice', 'smtp', 'snmp', 'software', 'ssh', 'ssl', 'stats', 'syslog',
    'weird', 'x509', 'intel', 'capture_loss', 'communication', 'ntlm',
    'pe', 'smb_files', 'smb_mapping', 'tunnel', 'rdp',
)
for _log_type in _ARCHIVED_TYPES:
    setattr(Archive, 'handle_' + _log_type, _archive_message)

100
threatline/handlers/base.py Normal file
View File

@@ -0,0 +1,100 @@
#!/usr/bin/env python
# Ingestion handler
import sys
from functools import partial
# Bro log types every handler knows about; BaseHandler gets a pass-through
# handle_<type> default for each (subclasses override the ones they change).
_LOG_TYPES = (
    'conn', 'dce_rpc', 'known_devices', 'dhcp', 'dns', 'files', 'http',
    'notice', 'smtp', 'snmp', 'software', 'ssh', 'ssl', 'stats', 'syslog',
    'weird', 'x509', 'intel', 'capture_loss', 'communication', 'ntlm',
    'pe', 'smb_files', 'smb_mapping', 'tunnel', 'rdp',
)


def _identity_handler(self, message):
    """Default handler: pass the message through unchanged."""
    return message


class BaseHandler(object):
    """Base class for pipeline stage handlers.

    Subclasses set self.mymod to their own class and self.settings to
    their Kafka topic configuration, then override the handle_<type>
    methods for the log types they transform.  The 26 pass-through
    defaults are installed programmatically below rather than as 26
    identical hand-written methods.
    """

    def __init__(self):
        self.settings = {}   # stage configuration; filled in by subclass
        self.dispatch = {}   # log type -> bound handler, built by initialize()
        self.mymod = None    # the subclass itself; set by subclass __init__

    def initialize(self):
        """Build the dispatch dict mapping log type -> bound handler.

        Scans self.mymod for every handle_* attribute (inherited defaults
        included) and binds it to this instance via functools.partial.
        """
        for lm in dir(self.mymod):
            if lm.startswith('handle_'):
                name = lm.replace('handle_', '')
                # Bind each method found, to this instance (self)
                self.dispatch[name] = partial(getattr(self.mymod, lm), self)
        self.settings['dispatchers'] = self.dispatch


# Install the pass-through default for every known log type.
for _log_type in _LOG_TYPES:
    setattr(BaseHandler, 'handle_' + _log_type, _identity_handler)
View File

@@ -0,0 +1,35 @@
#!/usr/bin/env python
# Check handler
import sys
from base import BaseHandler
from utils.check_utils import Checker
class Check(BaseHandler):
    """Check stage: matches enriched messages against threat intel.

    Consumes TLENRICHED, attaches an 'alert' field to DNS messages whose
    query hits the blacklist, and forwards to TLARCHIVE (alerts also go
    to TLALERTS via the dispatcher in threatline.py).
    """

    def check_ioc(self, dtype, data):
        """Route *data* to the checker for *dtype*; {} when unsupported."""
        if dtype == 'domain':
            return self.checker.check_domain(data)
        else:
            return {}

    def handle_dns(self, message):
        """Check a DNS log entry's query against known-bad domains."""
        # Some DNS records carry no 'query' field; the original
        # message['query'] lookup raised KeyError, which the dispatcher
        # logged and dropped the message.  Treat missing/empty the same.
        domain = message.get('query')
        if not domain:
            return message
        message['alert'] = self.check_ioc('domain', domain)
        return message

    def __init__(self):
        super(Check, self).__init__()
        self.mymod = sys.modules[__name__].Check
        self.checker = Checker()
        self.settings = {
            'consumer_topic': 'TLENRICHED',
            'consumer_group': 'check_group',
            'producer_topic': 'TLARCHIVE',
            'alert_topic': 'TLALERTS',
            'producer_client_id': 'checker-',
            'dispatchers': {}  # will be generated at initialize()
        }

View File

@@ -0,0 +1,41 @@
#!/usr/bin/env python
# Enrichment handler
import sys
import logging
from base import BaseHandler
from functools import partial
from utils.enrich_utils import IPClient, mac_lookup
class Enrich(BaseHandler):
    """Enrichment stage: consumes TLNORMALIZED, adds whois and MAC-vendor
    context to messages, and forwards them to TLENRICHED."""

    def handle_conn(self, message):
        """Attach whois enrichment for the remote side of a connection."""
        message['enrichment'] = {}
        # 'local_orig' True means the connection originated locally, so
        # the remote endpoint is the responder; otherwise the originator.
        remote_key = 'id.resp_h' if message['local_orig'] else 'id.orig_h'
        info = self.ipcli.enrichip(message[remote_key])
        if info:
            message['enrichment']['ip'] = info
        return message

    def handle_known_devices(self, message):
        """Attach MAC vendor registration info to a known-devices entry."""
        message['enrichment'] = {}
        vendor = mac_lookup(message['mac'])
        message['enrichment']['mac'] = vendor if vendor else {'note': 'Not Registered!'}
        return message

    def __init__(self):
        super(Enrich, self).__init__()
        self.ipcli = IPClient()
        self.mymod = sys.modules[__name__].Enrich
        self.settings = {
            'consumer_topic': 'TLNORMALIZED',
            'consumer_group': 'enrich_group',
            'producer_topic': 'TLENRICHED',
            'producer_client_id': 'enricher-',
            'dispatchers': {}  # will be generated at initialize()
        }

View File

@@ -0,0 +1,53 @@
#!/usr/bin/env python
# Ingestion handler
import sys
from ipaddress import ip_address
from base import BaseHandler
class Normalize(BaseHandler):
    """Normalize stage: first pipeline step.  Consumes the per-client
    TLINGEST-* topics, cleans up field names, and forwards to
    TLNORMALIZED."""

    def _promote_id_to_uid(self, message):
        # x509 and pe logs carry 'id' where every other log type uses
        # 'uid'; rename so downstream stages see a consistent key.
        message['uid'] = message.pop('id')
        return message

    def handle_x509(self, message):
        return self._promote_id_to_uid(message)

    def handle_pe(self, message):
        # same issue as x509
        return self._promote_id_to_uid(message)

    def handle_software(self, message):
        """Rewrite dotted version fields to underscore form."""
        dotted = ('version.major', 'version.minor', 'version.minor2',
                  'version.minor3', 'version.addl')
        for old_key in dotted:
            if old_key in message:
                message[old_key.replace('.', '_')] = message.pop(old_key)
        return message

    def handle_conn(self, message):
        # Returning None stops propagation: IPv6 connections are dropped.
        if ip_address(message['id.orig_h']).version == 6:
            return None
        return message

    def __init__(self):
        super(Normalize, self).__init__()
        self.mymod = sys.modules[__name__].Normalize
        self.settings = {
            'consumer_topic': ['TLINGEST-qwf',
                               'TLINGEST-wyomic',
                               'TLINGEST-lodi'],
            'consumer_group': 'normalize_group',
            'producer_topic': 'TLNORMALIZED',
            'producer_client_id': 'normalizer-',
            'dispatchers': {}  # will be generated at initialize()
        }

View File

View File

@@ -0,0 +1,50 @@
#!/usr/bin/env python
import json
from elasticsearch import Elasticsearch
class FileLogger(object):
    """Archiver that appends each message as one JSON line to a file."""

    def __init__(self, logfile):
        """Open *logfile* for appending; the handle stays open for the
        lifetime of the logger."""
        self.logfile = logfile
        self.flogger = open(self.logfile, 'a')

    def send(self, message):
        """Append *message* as a single JSON line.

        Writes are buffered by the file object, so be patient if you're
        tailing the log file.  The record and its newline go out in one
        write() call (the original used two, which could interleave
        records if multiple workers shared the file).
        """
        self.flogger.write(json.dumps(message) + '\n')
class ElasticLogger(object):
    """Archiver that indexes pipeline messages into Elasticsearch."""

    # Cluster nodes used when none are supplied (previously hard-coded).
    DEFAULT_HOSTS = ('10.15.0.45:9200', '10.15.0.46:9200', '10.15.0.47:9200')

    def __init__(self, hosts=None):
        """Connect to the cluster.

        hosts: optional iterable of 'host:port' strings; defaults to
        DEFAULT_HOSTS, keeping the original behavior for existing callers.
        """
        self.es = Elasticsearch(hosts=list(hosts) if hosts else list(self.DEFAULT_HOSTS))
        # Bro log types with an index mapping; anything else is dropped.
        self.mapped_types = ('files', 'notice', 'http', 'reporter',
                             'communication', 'packet_filter', 'ssl', 'dpd',
                             'capture_loss', 'dns', 'loaded_scripts', 'stats',
                             'weird', 'conn', 'x509', 'ssh', 'pe', 'smb_files',
                             'smb_mapping', 'snmp', 'dce_rpc', 'smtp', 'rdp',
                             'software', 'tunnel', 'known_certs',
                             'known_devices', 'known_hosts', 'known_services',
                             'dhcp')

    def send(self, message):
        """Index *message* into the 'threatline' index, typed by its
        'tltype' field; messages of unmapped types are silently dropped
        and indexing errors are printed and swallowed (best-effort)."""
        self.doctype = message['tltype']
        self.docindex = 'threatline'  # + self.doctype
        # Bro creates timestamps as floats like xxxxxxxxx.xxxxx; truncate
        # to whole seconds for the index mapping.
        message['ts'] = int(message['ts'])
        try:
            if self.doctype in self.mapped_types:
                self.es.index(index=self.docindex,
                              doc_type=self.doctype,
                              body=message)
            else:
                return
        except Exception as e:
            # Best-effort archive: log and drop rather than crash the worker.
            print(e)
            return

View File

@@ -0,0 +1,18 @@
#!/usr/bin/env python
import os
import logging
from threat_intel import domain_intel
class Checker(object):
def __init__(self):
self.dnsbl = domain_intel()
def check_domain(self, domain):
if domain in self.dnsbl:
return {'alert_type': 'domain',
'domain': domain,
'info': self.dnsbl[domain]}
return None

View File

@@ -0,0 +1,62 @@
#!/usr/bin/env python
import logging
from ipwhois import IPWhois, utils
from ipaddress import ip_address
from netaddr import *
def mac_lookup(mac_address):
    """Resolve a MAC address to its registered vendor via OUI lookup.

    Returns {'org': ..., 'address': ...} on success, or None when the
    OUI is not registered.  The registration record is fetched once
    (the original called oui.registration() twice) and the try now also
    covers the registration fetch itself.
    """
    mac = EUI(mac_address)
    try:
        registration = mac.oui.registration()
    except Exception:
        # netaddr raises for unknown OUIs -- treat as "not registered".
        logging.error('MAC {} not registered'.format(mac))
        return None
    return {
        'org': registration.org,
        'address': ' '.join(registration.address),
    }
class IPClient(object):
    """Whois/RDAP enrichment for public IP addresses, with a lookup cache."""

    def __init__(self):
        self.cache = {}  # addr -> enrichment dict; avoids repeat RDAP calls

    def is_defined(self, addr):
        """Return True if *addr* is reserved/special-use (loopback, multicast, etc.)."""
        queryip = ip_address(addr)
        if queryip.version == 4:
            defined, _, _ = utils.ipv4_is_defined(addr)
        else:
            # ip_address only ever yields version 4 or 6, so else is safe
            # (the original left 'defined' unbound if neither branch ran).
            defined, _, _ = utils.ipv6_is_defined(addr)
        return defined

    def enrichip(self, addr):
        """Enrich *addr* via RDAP.

        Returns the enrichment dict, or None for reserved addresses and
        failed lookups.  Successful results are cached per address.
        """
        if addr in self.cache:
            logging.info('Whois cache hit')
            return self.cache[addr]
        if self.is_defined(addr):
            return None
        self.queryip = IPWhois(addr)
        try:
            r = self.queryip.lookup_rdap()
        except Exception as err:
            # Network/parse failures are expected; log and skip enrichment.
            logging.error(err)
            return None
        ent = r['entities'][0]
        # RDAP 'contact' can be None/absent; guard before mutating it.
        whois = r['objects'][ent].get('contact') or {}
        # Drop PII not needed downstream; pop with default instead of the
        # original bare pop(), which raised KeyError when a key was absent.
        whois.pop('address', None)
        whois.pop('email', None)
        enrichment = {'asn_num': r['asn'],
                      'asn_desc': r['asn_description'],
                      'asn_country_code': r['asn_country_code'],
                      'network': r['network']['cidr'],
                      'whois': whois}
        self.cache[addr] = enrichment
        return enrichment

View File

@@ -0,0 +1,26 @@
#!/usr/bin/env python
import os
import logging
import requests
import ipaddress
# Threat-intel feeds: (source label, URL of a newline-delimited domain list).
# domain_intel() fetches each feed and tags every domain with its label.
domain_intel_sources = [
    ('MALWAREDOMAIN', 'http://bld.scoutsec.com/?genres=malware&style=list')
]
def get_download(url):
    """Fetch *url*; return the requests Response, or None on any failure.

    The except is deliberately broad: any network/HTTP error simply means
    "no data from this feed".  Failures now go to the log (logging is
    already imported by this module) instead of stdout.
    """
    try:
        return requests.get(url)
    except Exception as e:
        logging.error(e)
        return None
def domain_intel():
    """Download every feed in domain_intel_sources and merge them into one
    dict mapping domain -> source label.

    Fixes two defects: the original returned inside the loop, so only the
    first feed was ever fetched; and a failed download (get_download
    returning None) raised AttributeError on .text.
    """
    intel = {}
    for source, url in domain_intel_sources:
        response = get_download(url)
        if response is None:
            continue  # feed unreachable; skip rather than crash
        for domain in response.text.split('\n'):
            intel[domain] = source
    return intel

136
threatline/threatline.py Executable file
View File

@@ -0,0 +1,136 @@
#!/usr/bin/env python
import sys
import json
import time
import signal
import logging
from kafka import KafkaConsumer, KafkaProducer
from kafka.errors import KafkaError
from multiprocessing import Process
# Pipeline stage names accepted on the command line (one worker process each).
VALID_STAGES = ['normalize', 'enrich', 'check', 'archive']
# Kafka bootstrap brokers shared by every consumer/producer in this module.
KAFKA_BOOTSTRAP = ['10.15.0.40:9092', '10.15.0.41:9092', '10.15.0.42:9092']
# Worker log line format: timestamp then message.
LOGFORMAT = '%(asctime)-15s %(message)s'
def get_consumer(topic, consumer_group):
    """Build a KafkaConsumer in *consumer_group* subscribed to *topic*.

    *topic* may be a single topic name or a list of names (the normalize
    stage passes a list).  Message values are JSON-decoded from UTF-8.
    """
    def _decode(raw):
        return json.loads(raw.decode('utf-8'))

    consumer = KafkaConsumer(value_deserializer=_decode,
                             bootstrap_servers=KAFKA_BOOTSTRAP,
                             group_id=consumer_group,
                             enable_auto_commit=True)
    consumer.subscribe(topic)
    return consumer
def get_producer(client_id):
    """Build a KafkaProducer that JSON-encodes values and compresses with lz4."""
    def _encode(obj):
        return json.dumps(obj).encode('utf-8')

    return KafkaProducer(value_serializer=_encode,
                         bootstrap_servers=KAFKA_BOOTSTRAP,
                         client_id=client_id,
                         compression_type='lz4')
def main(stage):
    """Run one pipeline stage: consume from Kafka, dispatch each message
    to the stage's handler, and produce the result to the next topic.

    Runs forever (per-stage worker process); exits via SIGTERM handler
    or on an unknown stage name.
    """
    LOGFILE = '/var/log/' + stage + '_worker.log'
    logging.basicConfig(level=logging.INFO, format=LOGFORMAT,
                        filename=LOGFILE)
    logging.info('Launching {}'.format(stage))
    # NOTE(review): logged before the signal handler is actually registered
    # further below; 'Singals' typo is in the original message text.
    logging.info('Singals registered')
    normalize_flag = False
    archive_flag = False  # NOTE(review): set but never read -- confirm dead
    # Import the stage handler lazily so each worker only loads its own deps.
    if stage == 'normalize':
        from handlers.normalize import Normalize
        handle = Normalize()
        normalize_flag = True
    elif stage == 'enrich':
        from handlers.enrich import Enrich
        handle = Enrich()
    elif stage == 'check':
        from handlers.check import Check
        handle = Check()
    elif stage == 'archive':
        from handlers.archive import Archive
        handle = Archive()
    else:
        print('Unknown stage \'{}\''.format(stage))
        print('Valid stages: {}'.format(VALID_STAGES))
        sys.exit(1)
    handle.initialize()  # build the function dictionary
    alert_topic = None
    # Stage wiring comes entirely from the handler's settings dict.
    consumer_topic = handle.settings['consumer_topic']
    consumer_group = handle.settings['consumer_group']
    producer_topic = handle.settings['producer_topic']
    producer_client_id = handle.settings['producer_client_id']
    dispatch = handle.settings['dispatchers']
    consumer = get_consumer(consumer_topic, consumer_group)
    if stage == 'archive':  # we don't produce during archiving..
        producer = None
    else:
        producer = get_producer(producer_client_id)
    if stage == 'check':
        # Only the check stage publishes to a separate alert topic.
        alert_topic = handle.settings['alert_topic']

    def sig_handler(signal, fname):
        # Close Kafka clients cleanly on SIGTERM, then exit the worker.
        logging.warn('SIGTERM received. Shutting down...')
        try:
            consumer.close()
            if producer:
                producer.close(5.0)
        except Exception as e:
            logging.info(e)
        finally:
            sys.exit(1)
    signal.signal(signal.SIGTERM, sig_handler)
    # Main consume/dispatch/produce loop; blocks on the consumer iterator.
    for m in consumer:
        message = m.value
        if normalize_flag:
            """ At the normalize stage, the log data is nested
            with the key being the bro log-type."""
            client_id = m.topic.replace('TLINGEST-', '')
            # NOTE(review): .keys()[0] is Python 2 only (dict views are not
            # indexable in Python 3) -- confirm target interpreter.
            message_type = message.keys()[0]  # get the root key (log type)
            message = message[message_type]  # un-nest the log data
            message['tltype'] = message_type  # used at next stage
            message['client_id'] = client_id  # scoutsec client_id
            message['enrichment'] = {}  # for later enrichment
        else:
            message_type = message['tltype']
        try:
            # handle the message
            pubdata = dispatch[message_type](message)
        except Exception as ex:
            # A bad message is logged and skipped; the worker keeps running.
            logging.error(ex)
            continue
        if pubdata:
            # Handlers returning None (e.g. IPv6 conns) end the chain here.
            if 'alert' in pubdata and alert_topic:
                if pubdata['alert']:
                    producer.send(alert_topic, pubdata)
            if producer:
                producer.send(producer_topic, pubdata)
if __name__ == '__main__':
    # Usage: threatline.py STAGE [STAGE ...]
    if len(sys.argv) < 2:
        print('usage: script.py {}'.format(VALID_STAGES))
        sys.exit(1)
    stages = sys.argv[1:]
    # Spawn one worker process per requested stage; each configures its own
    # logging inside main().
    # NOTE(review): the original guarded this loop with `len(stages) < 8`,
    # and the else branch only reconfigured logging without starting
    # anything -- so invocations with 8+ stages silently exited.  The dead
    # branch is removed and all requested stages are spawned.
    for s in stages:
        p = Process(target=main, args=(s,))
        p.start()