-
Notifications
You must be signed in to change notification settings - Fork 285
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #201 from CIRCL/python3
Python 3 migration + many new features + fixes
- Loading branch information
Showing
90 changed files
with
1,730 additions
and
1,410 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,87 @@ | ||
#!/usr/bin/env python3 | ||
# -*-coding:UTF-8 -* | ||
|
||
""" | ||
The ApiKey Module | ||
====================== | ||
This module is consuming the Redis-list created by the Categ module. | ||
It applies API-key regexes on paste content and warns if the count is above a threshold.
""" | ||
|
||
import redis | ||
import pprint | ||
import time | ||
import re | ||
|
||
from packages import Paste | ||
from packages import lib_refine | ||
from pubsublogger import publisher | ||
|
||
from Helper import Process | ||
|
||
|
||
def search_api_key(message):
    """Scan the paste referenced by *message* for leaked API keys.

    message: a 'filename score' pair consumed from the Categ queue.

    When at least one AWS or Google key candidate is found, a warning is
    published and the paste is forwarded to the 'alertHandler' and
    'Duplicate' output queues.
    """
    filename, score = message.split()
    paste = Paste.Paste(filename)
    content = paste.get_p_content()

    aws_access_key = regex_aws_access_key.findall(content)
    aws_secret_key = regex_aws_secret_key.findall(content)
    google_api_key = regex_google_api_key.findall(content)

    if aws_access_key or aws_secret_key or google_api_key:
        to_print = 'ApiKey;{};{};{};'.format(
            paste.p_source, paste.p_date, paste.p_name)

        if google_api_key:
            print('found google api key')
            print(to_print)
            publisher.warning('{}Checked {} found Google API Key;{}'.format(
                to_print, len(google_api_key), paste.p_path))

        if aws_access_key or aws_secret_key:
            print('found AWS key')
            print(to_print)
            total = len(aws_access_key) + len(aws_secret_key)
            publisher.warning('{}Checked {} found AWS Key;{}'.format(
                to_print, total, paste.p_path))

        # Raise an alert and feed the duplicate detector.
        msg = 'apikey;{}'.format(filename)
        p.populate_set_out(msg, 'alertHandler')
        #Send to duplicate
        p.populate_set_out(filename, 'Duplicate')
|
||
if __name__ == "__main__":
    # Port of the redis instance used by pubsublogger.
    publisher.port = 6380
    # Script is the default channel used for the modules.
    publisher.channel = "Script"

    # Section name in bin/packages/modules.cfg.
    config_section = 'ApiKey'

    # Setup the I/O queues.
    p = Process(config_section)

    publisher.info("ApiKey started")

    # TODO improve REGEX
    # NOTE(review): the leading '=' inside these patterns restricts matches to
    # keys that directly follow an '=' sign (e.g. 'aws_secret_key=XXXX') --
    # confirm this is intended and not a leftover artifact.
    regex_aws_access_key = re.compile(r'(?<![A-Z0-9])=[A-Z0-9]{20}(?![A-Z0-9])')
    regex_aws_secret_key = re.compile(r'(?<!=[A-Za-z0-9+])=[A-Za-z0-9+]{40}(?![A-Za-z0-9+])')

    regex_google_api_key = re.compile(r'=AIza[0-9a-zA-Z-_]{35}')

    # Endless loop consuming messages from the input queue.
    # (The original fetched one message before entering the loop and then
    # overwrote it with the first in-loop fetch, silently dropping a paste;
    # that stray pre-loop fetch has been removed.)
    while True:
        message = p.get_from_set()

        if message is not None:
            search_api_key(message)
        else:
            publisher.debug("Script ApiKey is Idling 10s")
            time.sleep(10)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,136 @@ | ||
#!/usr/bin/env python3 | ||
# -*-coding:UTF-8 -* | ||
""" | ||
Base64 module | ||
Detect Base64 and decode it
""" | ||
import time | ||
import os | ||
import datetime | ||
|
||
from pubsublogger import publisher | ||
|
||
from Helper import Process | ||
from packages import Paste | ||
|
||
import re | ||
import base64 | ||
from hashlib import sha1 | ||
import magic | ||
import json | ||
|
||
import signal | ||
|
||
class TimeoutException(Exception):
    """Raised by the SIGALRM handler when paste processing exceeds its time budget."""
|
||
def timeout_handler(signum, frame):
    """SIGALRM handler: abort the current work by raising TimeoutException.

    signum/frame are the standard signal-handler arguments; both are ignored.
    """
    raise TimeoutException()
|
||
signal.signal(signal.SIGALRM, timeout_handler) | ||
|
||
|
||
def search_base64(content, message):
    """Find base64 blobs in *content*, decode them and store them on disk.

    content: raw paste text.
    message: the paste filename, used as origin metadata and forwarded on.

    Each decoded blob of at least 40 base64 characters is written out (with
    a JSON metadata sidecar) via save_base64_as_file; when at least one blob
    is found the paste is sent to the 'Duplicate' and 'alertHandler' queues.
    """
    found = False
    base64_list = re.findall(regex_base64, content)
    for b64 in base64_list:
        # Short matches are overwhelmingly false positives; keep >= 40 chars.
        if len(b64) >= 40:
            decoded = base64.b64decode(b64)

            # Guess the MIME type of the decoded payload.
            # (Locals renamed from the original `type`/`hash`/`find`,
            # which shadowed builtins.)
            mime_type = magic.from_buffer(decoded, mime=True)

            found = True
            sha1_hash = sha1(decoded).hexdigest()

            data = {}
            data['name'] = sha1_hash
            data['date'] = datetime.datetime.now().strftime("%d/%m/%y")
            data['origin'] = message
            data['estimated type'] = mime_type
            json_data = json.dumps(data)

            save_base64_as_file(decoded, mime_type, sha1_hash, json_data)
            print('found {} '.format(mime_type))

    if found:
        publisher.warning('base64 decoded')
        #Send to duplicate
        p.populate_set_out(message, 'Duplicate')
        #send to Browse_warning_paste
        msg = ('base64;{}'.format(message))
        p.populate_set_out( msg, 'alertHandler')
|
||
def save_base64_as_file(decode, type, hash, json_data):
    """Persist a decoded base64 payload together with its JSON metadata.

    Files land under <AIL_HOME>/<base64 dir>/<type>/<hash[:2]>/:
    the raw bytes as <hash>, the metadata as <hash>.json.

    Parameter names `type` and `hash` shadow builtins but are kept
    unchanged for backward compatibility with existing callers.
    """
    filename_b64 = os.path.join(os.environ['AIL_HOME'],
                                p.config.get("Directories", "base64"), type, hash[:2], hash)

    filename_json = os.path.join(os.environ['AIL_HOME'],
                                 p.config.get("Directories", "base64"), type, hash[:2], hash + '.json')

    # exist_ok=True avoids the check-then-create race of the original
    # `if not os.path.exists(dirname): os.makedirs(dirname)` guard.
    dirname = os.path.dirname(filename_b64)
    os.makedirs(dirname, exist_ok=True)

    with open(filename_b64, 'wb') as f:
        f.write(decode)

    with open(filename_json, 'w') as f:
        f.write(json_data)
|
||
|
||
|
||
|
||
if __name__ == '__main__':
    # If you wish to use an other port of channel, do not forget to run a subscriber accordingly (see launch_logs.sh)
    # Port of the redis instance used by pubsublogger
    publisher.port = 6380
    # Script is the default channel used for the modules.
    publisher.channel = 'Script'

    # Section name in bin/packages/modules.cfg
    config_section = 'Base64'

    # Setup the I/O queues
    p = Process(config_section)
    max_execution_time = p.config.getint("Base64", "max_execution_time")

    # Sent to the logging a description of the module
    publisher.info("Base64 started")

    # Compile once and keep the compiled pattern. (The original called
    # re.compile() but discarded its return value, so matching always went
    # through the raw string. re.findall accepts a compiled pattern, so
    # this stays backward compatible with search_base64.)
    regex_base64 = re.compile('(?:[A-Za-z0-9+/]{4}){2,}(?:[A-Za-z0-9+/]{2}[AEIMQUYcgkosw048]=|[A-Za-z0-9+/][AQgw]==)')

    # Endless loop getting messages from the input queue
    while True:
        # Get one message from the input queue
        message = p.get_from_set()
        if message is None:
            publisher.debug("{} queue is empty, waiting".format(config_section))
            time.sleep(1)
            continue

        filename = message
        paste = Paste.Paste(filename)

        # Abort pathological pastes after max_execution_time seconds
        # (SIGALRM handler raises TimeoutException).
        signal.alarm(max_execution_time)
        try:
            content = paste.get_p_content()
            search_base64(content, message)
        except TimeoutException:
            print("{0} processing timeout".format(paste.p_path))
            continue
        else:
            # Disarm the alarm once the paste was processed in time.
            signal.alarm(0)
Oops, something went wrong.