This type of nonsense is why the world is what it is.

bt3 2018-06-27 11:03:43 -07:00
parent df5b567099
commit 38d47f0f58
3 changed files with 0 additions and 101 deletions

@@ -1,5 +0,0 @@
# Nice ELK hacks
```
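# Count hits per username for osqueryd events in the 2017-09-08 logstash index:
# jq pulls ty_params.Username out of each hit, then sort | uniq -c tallies them.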
curl -s logs.HOST.com:9200/logstash-2017.09.08/_search\?q=ty_params.ProcessName:osqueryd\&size=10000\&sort=@timestamp:desc | jq -r '.hits.hits[]._source.ty_params.Username' | sort | uniq -c | sort -nr
```

@@ -1,47 +0,0 @@
import elasticsearch
import whois
import json
from elasticsearch import Elasticsearch

es = Elasticsearch([{'host': "HOST NAME"}])

query = {
    'size': 100,
    'query': {
        'filtered': {
            'query': {
                'query_string': {
                    'query': 'type:named_query_log',
                    'analyze_wildcard': True
                }
            },
            'filter': {
                'bool': {
                    'must_not': {
                        'query_string': {
                            'query': '*HOST.com OR *otherhost.com',
                            'analyze_wildcard': True
                        }
                    }
                }
            }
        }
    }
}
# Run the search
res = es.search(index="LOG-NAME", body=query)

results = []
counter = 0

# WHOIS each DNS destination we saw and keep the lookups that succeed
for hit in res['hits']['hits']:
    if "dns_dest" in hit['_source'].keys():
        try:
            results.append(json.dumps(whois.whois(hit['_source']['dns_dest'])))
        except Exception:
            # Skip domains whose WHOIS lookup fails
            pass
    counter += 1
    print "Scanning {0}/{1} domains, {2} succeeded..".format(counter, len(res['hits']['hits']), len(results))

with open('processed_domains.txt', 'w') as outfile:
    json.dump(results, outfile)
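
The `filtered` query above is old Elasticsearch syntax (deprecated in the 2.x line and removed in 5.x). If this script were pointed at a newer cluster, a roughly equivalent request body might look like the sketch below; the field names, host patterns, and placeholders are carried over from the original, not verified against any cluster.

```
# Sketch only: equivalent query for Elasticsearch 5.x and later, where the
# 'filtered' wrapper is gone and a top-level bool takes must/must_not directly.
query = {
    'size': 100,
    'query': {
        'bool': {
            'must': {
                'query_string': {
                    'query': 'type:named_query_log',
                    'analyze_wildcard': True
                }
            },
            'must_not': {
                'query_string': {
                    'query': '*HOST.com OR *otherhost.com',
                    'analyze_wildcard': True
                }
            }
        }
    }
}
```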

@@ -1,49 +0,0 @@
#!/usr/bin/env python
import os
import subprocess
import json
import socket
import logging

LOG_PATH = "/var/log/logname.log"
FORWARD_PATH = "/etc/logstash-forwarder.conf"
LOG_LEVEL = logging.DEBUG

# Apply LOG_LEVEL to the root logger so the info/debug messages below show up
logging.basicConfig(level=LOG_LEVEL)

# Set up logpath
if not os.path.isfile(LOG_PATH):
    logging.info("No {0} file. Calling: sudo touch {1}".format(LOG_PATH, LOG_PATH))
    subprocess.call("sudo touch {0}".format(LOG_PATH), shell=True)
    logging.info("Setting perms. Calling: sudo chmod 666 {0}".format(LOG_PATH))
    subprocess.call("sudo chmod 666 {0}".format(LOG_PATH), shell=True)
# Set up forwarding
if os.path.isfile(FORWARD_PATH):
    logging.info("Forwarding {0} to logstash...".format(FORWARD_PATH))
    try:
        with open(FORWARD_PATH, "r") as f:
            data = json.load(f)
        try:
            if LOG_PATH not in data['files'][0]['paths']:
                data['files'][0]['paths'].append(LOG_PATH)
                with open(FORWARD_PATH, "w") as jsonFile:
                    jsonFile.write(json.dumps(data))
        except KeyError:
            logging.error("Could not set logstash: {0} is not well formatted.".format(FORWARD_PATH))
    except IOError:
        logging.error("Could not open {0}".format(FORWARD_PATH))
else:
    hostname = socket.gethostname()
    # No forwarder config on this host; warn for hosts that should have one
    if "prodvpn" in hostname:
        logging.warning("Forwarder should be in {0}. Please set up a forwarder and try again.".format(FORWARD_PATH))