disable the re-claiming of sites that were marked claimed more than an hour ago, because sometimes pages legitimately take longer than an hour to brozzle; working on a better solution to this issue

This commit is contained in:
Noah Levitt 2017-06-19 11:21:02 -07:00
parent 7ae22381ef
commit 6bae53e646
3 changed files with 19 additions and 9 deletions

View file

@ -102,21 +102,28 @@ class RethinkDbFrontier:
["ACTIVE", r.minval], ["ACTIVE", r.maxval], ["ACTIVE", r.minval], ["ACTIVE", r.maxval],
index="sites_last_disclaimed") index="sites_last_disclaimed")
.order_by(index="sites_last_disclaimed") .order_by(index="sites_last_disclaimed")
.filter((r.row["claimed"] != True) | ( .filter(r.row["claimed"] != True)
r.row["last_claimed"] < r.now() - 60*60)) # XXX
# .filter((r.row["claimed"] != True) | (
# r.row["last_claimed"] < r.now() - 60*60))
.limit(1) .limit(1)
.update( .update(
# try to avoid a race condition resulting in multiple # try to avoid a race condition resulting in multiple
# brozzler-workers claiming the same site # brozzler-workers claiming the same site
# see https://github.com/rethinkdb/rethinkdb/issues/3235#issuecomment-60283038 # see https://github.com/rethinkdb/rethinkdb/issues/3235#issuecomment-60283038
r.branch((r.row["claimed"] != True) | ( r.branch(r.row["claimed"] != True, {
r.row["last_claimed"] < r.now() - 60*60), {
"claimed": True, "last_claimed_by": worker_id, "claimed": True, "last_claimed_by": worker_id,
"last_claimed": doublethink.utcnow()}, {}), "last_claimed": doublethink.utcnow()}, {}),
# XXX
# r.branch((r.row["claimed"] != True) | (
# r.row["last_claimed"] < r.now() - 60*60), {
# "claimed": True, "last_claimed_by": worker_id,
# "last_claimed": doublethink.utcnow()}, {}),
return_changes=True)).run() return_changes=True)).run()
self._vet_result(result, replaced=[0,1], unchanged=[0,1]) self._vet_result(result, replaced=[0,1], unchanged=[0,1])
if result["replaced"] == 1: if result["replaced"] == 1:
if result["changes"][0]["old_val"]["claimed"]: if result["changes"][0]["old_val"]["claimed"]:
# XXX impossible at the moment
self.logger.warn( self.logger.warn(
"re-claimed site that was still marked 'claimed' " "re-claimed site that was still marked 'claimed' "
"because it was last claimed a long time ago " "because it was last claimed a long time ago "

View file

@ -32,7 +32,7 @@ def find_package_data(package):
setuptools.setup( setuptools.setup(
name='brozzler', name='brozzler',
version='1.1b12.dev257', version='1.1b12.dev258',
description='Distributed web crawling with browsers', description='Distributed web crawling with browsers',
url='https://github.com/internetarchive/brozzler', url='https://github.com/internetarchive/brozzler',
author='Noah Levitt', author='Noah Levitt',

View file

@ -723,14 +723,17 @@ def test_claim_site():
with pytest.raises(brozzler.NothingToClaim): with pytest.raises(brozzler.NothingToClaim):
claimed_site = frontier.claim_site(worker_id='test_claim_site') claimed_site = frontier.claim_site(worker_id='test_claim_site')
# site last_claimed more than 1 hour ago can be reclaimed ### temporarily changing this behavior
### # site last_claimed more than 1 hour ago can be reclaimed
site = claimed_site site = claimed_site
claimed_site = None claimed_site = None
site.last_claimed = doublethink.utcnow() - datetime.timedelta(minutes=65) site.last_claimed = doublethink.utcnow() - datetime.timedelta(minutes=65)
site.save() site.save()
### claimed_site = frontier.claim_site(worker_id='test_claim_site')
### assert claimed_site.id == site.id
with pytest.raises(brozzler.NothingToClaim):
claimed_site = frontier.claim_site(worker_id='test_claim_site') claimed_site = frontier.claim_site(worker_id='test_claim_site')
assert claimed_site.id == site.id
# clean up # clean up
rr.table('sites').get(claimed_site.id).delete().run() rr.table('sites').get(site.id).delete().run()