Mirror of https://github.com/Watchful1/PushshiftDumps.git, synced 2025-07-23 23:00:40 -04:00
Commit 4f1d70d34a (parent 3700b21b81): Reorganize
19 changed files with 0 additions and 105 deletions
personal/mongo/export_mongo.py (new file, 55 lines)
@@ -0,0 +1,55 @@
import json
import utils
import discord_logging
import pymongo
import time
import sys
from datetime import datetime

log = discord_logging.init_logging()


if __name__ == "__main__":
	mongo_address = sys.argv[1]  # 192.168.1.131
	client = pymongo.MongoClient(f"mongodb://{mongo_address}:27017", serverSelectionTimeoutMS=5000)
	log.info(f"Database connected at {mongo_address} on {client.admin.command('serverStatus')['host']}")

	subreddits = [
		"PersonalFinanceCanada"
	]
	start_date = datetime(2020, 1, 1)
	end_date = datetime(2021, 1, 1)

	for subreddit in subreddits:
		count = 0
		start_time = time.time()
		# Stream all comments for this subreddit in the date window, oldest first
		cursor = client.reddit_database.comments.find(
			filter={"subreddit": subreddit, "created_utc": {"$gte": int(start_date.timestamp()), "$lt": int(end_date.timestamp())}},
			projection={'_id': False},
			sort=[('created_utc', pymongo.ASCENDING)]
		)
		log.info(f"Got cursor in {int(time.time() - start_time)} seconds")

		# Write each comment as one line of compact JSON into a zst-compressed output file
		output_writer = utils.OutputZst(r"\\MYCLOUDPR4100\Public\reddit_final\{0}_comments.zst".format(subreddit))
		start_time = time.time()
		for comment in cursor:
			count += 1
			output_writer.write(json.dumps(comment, separators=(',', ':')))
			output_writer.write("\n")
			if count % 10000 == 0:
				log.info(f"{count:,} through {datetime.utcfromtimestamp(int(comment['created_utc'])).strftime('%Y-%m-%d %H:%M:%S')} in {int(time.time() - start_time)} seconds r/{subreddit}")

		output_writer.close()
		log.info(f"{count:,} in {int(time.time() - start_time)} seconds r/{subreddit}")


# db.comments.createIndex({subreddit:1}) // remove
# db.comments.createIndex({subreddit:1, created_utc:1})
# db.comments.createIndex({author:1, created_utc:1})
# db.comments.createIndex({id:1})
# db.submissions.createIndex({subreddit:1, created_utc:1})
# db.submissions.createIndex({author:1, created_utc:1})
# db.submissions.createIndex({id:1})
# db.submissions.createIndex({created_utc:1})
# db.comments.createIndex({created_utc:1})
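The trailing createIndex lines are mongo-shell commands noting the indexes these scripts rely on; the compound subreddit + created_utc index is what backs the filtered, time-sorted find() above. A rough pymongo equivalent, a sketch assuming the same reddit_database naming, would be:

import pymongo

client = pymongo.MongoClient("mongodb://192.168.1.131:27017")
db = client.reddit_database
# Compound indexes backing the filtered, time-sorted queries in these scripts
db.comments.create_index([("subreddit", pymongo.ASCENDING), ("created_utc", pymongo.ASCENDING)])
db.submissions.create_index([("subreddit", pymongo.ASCENDING), ("created_utc", pymongo.ASCENDING)])

The script itself takes the MongoDB host as its only argument, e.g. python export_mongo.py 192.168.1.131.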
personal/mongo/group_subs.py (new file, 57 lines)
@@ -0,0 +1,57 @@
import json
from datetime import datetime
import utils
import discord_logging
import pymongo
import time
import sys

log = discord_logging.init_logging()


if __name__ == "__main__":
	mongo_address = sys.argv[1]  # 192.168.1.131
	client = pymongo.MongoClient(f"mongodb://{mongo_address}:27017", serverSelectionTimeoutMS=5000)
	log.info(f"Database connected at {mongo_address} on {client.admin.command('serverStatus')['host']}")

	count = 0
	start_time = time.time()
	start_date = int(datetime(2021, 6, 1).timestamp())
	# Per-subreddit totals and NSFW counts for submissions after start_date,
	# keeping only subreddits with more than 100 submissions
	cursor = client.reddit_database.submissions.aggregate(
		[
			{"$match": {"created_utc": {"$gt": start_date}}},
			{"$project": {"subreddit": 1, "over_18": {"$cond": ["$over_18", 1, 0]}}},
			{"$group": {"_id": "$subreddit", "countTotal": {"$count": {}}, "countNsfw": {"$sum": "$over_18"}}},
			{"$match": {"countTotal": {"$gt": 100}}},
		],
		allowDiskUse=True
	)
	log.info(f"Got cursor in {int(time.time() - start_time)} seconds")

	start_time = time.time()
	subreddits = []
	for subreddit in cursor:
		subreddit['percent'] = int((subreddit['countNsfw'] / subreddit['countTotal']) * 100)
		if subreddit['percent'] >= 10:
			subreddits.append(subreddit)
		count += 1
		if count % 100000 == 0:
			log.info(f"{count:,} in {int(time.time() - start_time)} seconds")

	log.info(f"{count:,} in {int(time.time() - start_time)} seconds")

	file_out = open(r"\\MYCLOUDPR4100\Public\reddit_final\subreddits.txt", 'w')
	for subreddit in sorted(subreddits, key=lambda item: (item['percent'], item['countTotal']), reverse=True):
		file_out.write(f"{subreddit['_id']: <22}{subreddit['countTotal']: <8}{subreddit['countNsfw']: <8}{subreddit['percent']}%\n")
	file_out.close()


# db.comments.createIndex({subreddit:1}) // remove
# db.comments.createIndex({subreddit:1, created_utc:1})
# db.comments.createIndex({author:1, created_utc:1})
# db.comments.createIndex({id:1})
# db.submissions.createIndex({subreddit:1, created_utc:1})
# db.submissions.createIndex({author:1, created_utc:1})
# db.submissions.createIndex({id:1})
# db.submissions.createIndex({created_utc:1})
# db.comments.createIndex({created_utc:1})
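The aggregation groups submissions newer than June 2021 by subreddit, counts total and NSFW posts, and drops subreddits with 100 or fewer submissions; the loop then keeps those that are at least 10% NSFW. A small illustration of the percentage math on a hypothetical result document:

# Hypothetical shape of one document coming out of the $group stage (illustration only)
doc = {"_id": "example_subreddit", "countTotal": 2500, "countNsfw": 400}
percent = int((doc["countNsfw"] / doc["countTotal"]) * 100)  # 16, so this one would be kept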
personal/mongo/insert_mongo.py (new file, 62 lines)
@@ -0,0 +1,62 @@
import utils
import discord_logging
import os
import pymongo
import sys
from datetime import datetime

log = discord_logging.init_logging()


if __name__ == "__main__":
	mongo_address = sys.argv[1]  # 192.168.1.131
	client = pymongo.MongoClient(f"mongodb://{mongo_address}:27017", serverSelectionTimeoutMS=5000)

	log.info(f"Database connected at {mongo_address} on {client.admin.command('serverStatus')['host']}")

	object_type = sys.argv[2]
	input_folder = sys.argv[3]
	input_files = []
	total_size = 0
	# Collect all .zst files under input_folder/object_type in sorted filename order
	for subdir, dirs, files in os.walk(input_folder + os.sep + object_type):
		files.sort()
		for filename in files:
			input_path = os.path.join(subdir, filename)
			if input_path.endswith(".zst"):
				file_size = os.stat(input_path).st_size
				total_size += file_size
				input_files.append([input_path, file_size])

	log.info(f"Processing {len(input_files)} files of {(total_size / (2 ** 30)):.2f} gigabytes")

	collection = client.reddit_database[object_type]

	log.info(f"Using collection {object_type} which has {collection.estimated_document_count()} objects already")

	total_lines = 0
	total_bytes_processed = 0
	for input_file in input_files:
		file_lines = 0
		file_bytes_processed = 0
		created = None
		inserts = []
		for obj, line, file_bytes_processed in utils.read_obj_zst_meta(input_file[0]):
			inserts.append(obj)
			# Insert in batches of 10,000 to keep memory bounded
			if len(inserts) >= 10000:
				collection.insert_many(inserts)
				inserts = []

			created = datetime.utcfromtimestamp(int(obj['created_utc']))
			file_lines += 1
			if file_lines == 1:
				log.info(f"{created.strftime('%Y-%m-%d %H:%M:%S')} : {file_lines + total_lines:,} : 0% : {(total_bytes_processed / total_size) * 100:.0f}%")
			if file_lines % 100000 == 0:
				log.info(f"{created.strftime('%Y-%m-%d %H:%M:%S')} : {file_lines + total_lines:,} : {(file_bytes_processed / input_file[1]) * 100:.0f}% : {(total_bytes_processed / total_size) * 100:.0f}%")

		# Flush the final partial batch; insert_many raises InvalidOperation
		# on an empty list, so only call it when something remains
		if len(inserts) > 0:
			collection.insert_many(inserts)
		total_lines += file_lines
		total_bytes_processed += input_file[1]
		log.info(f"{created.strftime('%Y-%m-%d %H:%M:%S')} : {total_lines:,} : 100% : {(total_bytes_processed / total_size) * 100:.0f}%")

	log.info(f"Total: {total_lines}")
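insert_mongo.py leans on utils.read_obj_zst_meta to stream (object, raw line, bytes read) tuples out of each .zst file. That helper lives elsewhere in the repo; a minimal sketch of such a reader, assuming the zstandard package and newline-delimited JSON input (the actual utils implementation may differ):

import json
import zstandard

def read_obj_zst_meta(file_path):
	# Sketch: stream newline-delimited JSON objects from a zstd-compressed file,
	# yielding (parsed_object, raw_line, compressed_bytes_read_so_far)
	with open(file_path, 'rb') as file_handle:
		reader = zstandard.ZstdDecompressor(max_window_size=2**31).stream_reader(file_handle)
		buffer = ""
		while True:
			chunk = reader.read(2**27)
			if not chunk:
				break
			lines = (buffer + chunk.decode("utf-8", errors="replace")).split("\n")
			buffer = lines[-1]  # carry any partial trailing line into the next chunk
			for line in lines[:-1]:
				if line:
					yield json.loads(line), line, file_handle.tell()

Invocation follows the positional arguments read at the top of the script, e.g. python insert_mongo.py 192.168.1.131 comments /path/to/dumps.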