Mirror of https://github.com/Watchful1/PushshiftDumps.git, synced 2025-07-23 14:50:35 -04:00
Some cleanup, optimize multiprocess

parent 461028b401 · commit 1a99630073
5 changed files with 246 additions and 237 deletions
@@ -14,11 +14,21 @@ log.addHandler(logging.StreamHandler())
 def read_lines_zst(file_name):
 	skip_to = 3566550750
 	bytes_read = 0
 	with open(file_name, 'rb') as file_handle:
 		buffer = ''
 		reader = zstandard.ZstdDecompressor(max_window_size=2**31).stream_reader(file_handle)
 		#reader.read(40000000000)
 		while True:
-			chunk = reader.read(2**27).decode()
+			data_chunk = reader.read(2**27)
+			try:
+				chunk = data_chunk.decode()
+			except UnicodeDecodeError:
+				log.info("Decoding error, reading a second chunk")
+				data_chunk += reader.read(2**29)
+				chunk = data_chunk.decode()
+
 			if not chunk:
 				break
 			lines = (buffer + chunk).split("\n")
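The removed one-shot read-and-decode call fails whenever a 2**27-byte chunk happens to end in the middle of a multi-byte UTF-8 character. A minimal sketch of that failure mode and of the retry pattern the new code uses (the byte string here is purely illustrative):

import codecs  # stdlib, only for reference; plain bytes.decode() is used below

data = "café".encode()  # b'caf\xc3\xa9' - the 'é' is two bytes in UTF-8
first = data[:4]        # a chunk boundary that splits the character: b'caf\xc3'

try:
	chunk = first.decode()
except UnicodeDecodeError:
	# Same recovery as the diff: read more bytes, then decode the combined buffer.
	first += data[4:]
	chunk = first.decode()

print(chunk)  # café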
@@ -27,6 +37,7 @@ def read_lines_zst(file_name):
 				yield line, file_handle.tell()
 
 			buffer = lines[-1]
 
 		reader.close()
+
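Assembling both hunks, the resulting generator looks roughly like this. This is a sketch built from the diff context, not the file verbatim: the skip_to/bytes_read bookkeeping and the commented-out debug read are omitted, and the "for line in lines[:-1]:" line is inferred from the yield and the buffer carry-over.

import zstandard

def read_lines_zst(file_name):
	# Stream-decompress a zstd NDJSON dump, yielding one line at a time
	# along with the number of compressed bytes consumed so far.
	with open(file_name, 'rb') as file_handle:
		buffer = ''
		reader = zstandard.ZstdDecompressor(max_window_size=2**31).stream_reader(file_handle)
		while True:
			data_chunk = reader.read(2**27)
			try:
				chunk = data_chunk.decode()
			except UnicodeDecodeError:
				# The chunk ended mid-character; pull in more bytes and retry.
				data_chunk += reader.read(2**29)
				chunk = data_chunk.decode()

			if not chunk:
				break
			lines = (buffer + chunk).split("\n")
			for line in lines[:-1]:
				yield line, file_handle.tell()

			# The last element may be a partial line; carry it into the next chunk.
			buffer = lines[-1]

		reader.close()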
@ -39,19 +50,20 @@ if __name__ == "__main__":
|
|||
field = "subreddit"
|
||||
value = "wallstreetbets"
|
||||
bad_lines = 0
|
||||
try:
|
||||
for line, file_bytes_processed in read_lines_zst(file_path):
|
||||
try:
|
||||
obj = json.loads(line)
|
||||
created = datetime.utcfromtimestamp(int(obj['created_utc']))
|
||||
temp = obj[field] == value
|
||||
except (KeyError, json.JSONDecodeError) as err:
|
||||
bad_lines += 1
|
||||
file_lines += 1
|
||||
if file_lines % 100000 == 0:
|
||||
log.info(f"{created.strftime('%Y-%m-%d %H:%M:%S')} : {file_lines:,} : {bad_lines:,} : {(file_bytes_processed / file_size) * 100:.0f}%")
|
||||
except Exception as err:
|
||||
log.info(err)
|
||||
# try:
|
||||
for line, file_bytes_processed in read_lines_zst(file_path):
|
||||
try:
|
||||
obj = json.loads(line)
|
||||
created = datetime.utcfromtimestamp(int(obj['created_utc']))
|
||||
temp = obj[field] == value
|
||||
except (KeyError, json.JSONDecodeError) as err:
|
||||
bad_lines += 1
|
||||
file_lines += 1
|
||||
if file_lines % 100000 == 0:
|
||||
log.info(f"{created.strftime('%Y-%m-%d %H:%M:%S')} : {file_lines:,} : {bad_lines:,} : {file_bytes_processed:,}:{(file_bytes_processed / file_size) * 100:.0f}%")
|
||||
|
||||
# except Exception as err:
|
||||
# log.info(err)
|
||||
|
||||
log.info(f"Complete : {file_lines:,} : {bad_lines:,}")
|
||||
|
||||
|
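For reference, a hedged usage sketch of the new main-block flow, using the read_lines_zst sketch above. The file name is hypothetical, and the diff's unused comparison (temp = obj[field] == value) is turned into a match counter here for illustration. Since file_bytes_processed comes from file_handle.tell() on the compressed stream, the logged percentage tracks compressed bytes consumed, not lines.

import json
import os
from datetime import datetime

file_path = "comments.zst"  # hypothetical dump file
file_size = os.stat(file_path).st_size
file_lines = bad_lines = matched = 0
created = datetime.utcfromtimestamp(0)  # placeholder until the first parseable line

for line, file_bytes_processed in read_lines_zst(file_path):
	try:
		obj = json.loads(line)
		created = datetime.utcfromtimestamp(int(obj['created_utc']))
		if obj['subreddit'] == 'wallstreetbets':
			matched += 1
	except (KeyError, json.JSONDecodeError):
		# Malformed or incomplete lines are counted rather than aborting the scan.
		bad_lines += 1
	file_lines += 1
	if file_lines % 100000 == 0:
		print(f"{created.strftime('%Y-%m-%d %H:%M:%S')} : {file_lines:,} : {bad_lines:,} : {(file_bytes_processed / file_size) * 100:.0f}%")

print(f"Complete : {file_lines:,} : {matched:,} matched : {bad_lines:,} bad")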