fix context.destroy in proxy, get recent more efficiently #20

Merged
ben merged 18 commits from recent into main 2023-12-30 01:05:39 +00:00
3 changed files with 29 additions and 23 deletions

View File

@@ -28,7 +28,7 @@ def dealer(dealer_addr, router_addr):
dealer.close()
router.close()
context.close()
context.destroy()
def proxy_buffering(frontend_addr, backend_addr, capture_addr=None,
@@ -200,7 +200,7 @@ def proxy_buffering(frontend_addr, backend_addr, capture_addr=None,
# we never used to get here
frontend.close()
backend.close()
context.close()
context.destroy()
def proxy_forwarder(frontend_addr, backend_addr, capture_addr):
@@ -236,7 +236,7 @@ def proxy_forwarder(frontend_addr, backend_addr, capture_addr):
backend.close()
if capture:
capture.close()
context.close()
context.destroy()
def capture(capture_addr):

View File

@@ -70,8 +70,11 @@ class ScreenPublisher(Publisher):
return padding + msg
def make_weather(self):
current = Weather.get_recent(self.weather, 30*60)
return self.align_center(current.desc)
try:
current = Weather.get_recent(self.weather, 30*60)
return self.align_center(current.desc)
except Weather.DoesNotExist:
return "no weather"
def make_rain(self, weather):
return "~?~"
@@ -95,18 +98,18 @@ class ScreenPublisher(Publisher):
# .replace does not mutate original string
shortname = a.replace('room', 'r')
t0 = time.time()
try:
t0 = time.time()
result = Temperatures.get_recent(a, secs=30*60)
t1 = time.time() - t0
logger.debug(f"query for: {t1:.3f}s, name='{a}'")
tempstr = f"{result.temp:.1f}"
if result.temp < 10.0:
tempstr = " " + tempstr
temps.append(f"{shortname}: {tempstr} C")
except KeyError:
logger.trace(f"no recent temp for '{a}'")
temps.append(f"{shortname}: -- C")
result = Temperatures.get_recent(a, 15*60)
result_str = str(round(result.temp, 1)).rjust(4)
except Temperatures.DoesNotExist:
logger.warning(f"No recent temp found for '{a}'")
result_str = "NONE"
t1 = time.time() - t0
t_total = round(t1, 3)
logger.debug(f"query for: {t_total}s, name='{a}'")
temps.append(f"{shortname}: {result_str} C")
fill = max([len(a) for a in temps])
chunks = chunk([a.rjust(fill) for a in temps], 2)

View File

@@ -1,7 +1,7 @@
#!/usr/bin/python3
import json
from datetime import datetime, timezone, timedelta
from datetime import datetime, timedelta
import peewee
from peewee import DateTimeField, TextField, DecimalField, CharField, BooleanField
@@ -34,15 +34,14 @@ def dbconnect(**mysqlconf):
def seconds(secs):
return datetime.now(timezone.utc)-timedelta(seconds=secs)
return datetime.now()-timedelta(seconds=secs)
class BaseModel(peewee.Model):
@classmethod
def get_last(cls, name):
# http://docs.peewee-orm.com/en/latest/peewee/querying.html
return cls.select().where(
cls.name == name).order_by(-cls.id).get()
return cls.select().where(cls.name == name).order_by(-cls.id).get()
@classmethod
def get_last_many(cls, names):
@@ -50,9 +49,13 @@ class BaseModel(peewee.Model):
@classmethod
def get_recent(cls, name, secs):
return cls.select().where(
cls.time > seconds(secs) and cls.name == name).order_by(
cls.time.desc()).get()
last = cls.get_last(name)
last_age = datetime.utcnow() - last.time
if last_age.total_seconds() <= float(secs):
return last
else:
logger.info(f"Last value in {cls.__name__} is older than {secs}s, age: {last_age}")
raise cls.DoesNotExist
@classmethod
def retry_create(cls, *args, **kwargs):