queries.py
import os
from datetime import datetime, timedelta

from sqlalchemy import asc, create_engine, desc, exists
from sqlalchemy.orm import sessionmaker
from sqlalchemy_utils import database_exists, create_database

from entities import Base, Image
class Queries:
    """
    Contains the database queries.
    """

    def __init__(self):
        """
        Based on environment variables, connects either to a MySQL-compatible
        serverless Aurora database on AWS or to a local SQLite instance.
        Creates the schema if it has not been initialized yet.
        """
        print("Initializing query engine.")
        if os.environ.get("ENV", None) == "AWS":
            print("Connecting to serverless Aurora database on AWS")
            rds_user = os.environ.get("RDS_USER")
            rds_password = os.environ.get("RDS_PASSWORD")
            rds_port = os.environ.get("RDS_PORT", 3306)
            rds_host = os.environ.get("RDS_HOST")
            url = f'mysql+mysqlconnector://{rds_user}:{rds_password}@{rds_host}:{rds_port}/soup'
            self.engine = create_engine(url)
            if not database_exists(self.engine.url):
                print("Database does not exist. Creating.")
                create_database(self.engine.url)
            print("Connected to MySQL database on AWS")
            # Do I actually... miss DI?
        else:
            print("Connecting to local SQLite database")
            self.engine = create_engine('sqlite:///resources/soup.db')
            print("Connected to local SQLite database")
        print("Creating schema if necessary.")
        Base.metadata.create_all(self.engine)
        print("Creating session")
        Session = sessionmaker(bind=self.engine)
        self.session = Session()
        print("Finished initializing query engine")

    def image_exists(self, url):
        """
        Indicates whether an image already exists for the given URL.
        :param url: the image URL to search for
        :return: True if an image with that URL exists
        """
        return self.session.query(exists().where(Image.url == url)).scalar()

    def top_soup(self, confidence=0.8):
        """
        Finds the most recent image that is likely soup.
        :param confidence: the minimum soup confidence to accept
        :return: the most recent Image at or above the confidence threshold
        """
        return self.session.query(Image)\
            .filter(Image.soup_confidence >= confidence)\
            .order_by(desc(Image.post_date))\
            .first()

    def most_recent_image_date(self):
        """
        Finds the date of the most recent image.
        :return: the most recent post date, or five days ago if no images exist
        """
        image = self.session.query(Image)\
            .order_by(desc(Image.post_date))\
            .first()
        if not image:
            return datetime.now().date() - timedelta(days=5)
        return datetime.fromtimestamp(image.post_date).date()

    def oldest_image_date(self):
        """
        Finds the date of the oldest image.
        :return: the oldest post date, or five days ago if no images exist
        """
        image = self.session.query(Image)\
            .order_by(asc(Image.post_date))\
            .first()
        if not image:
            return datetime.now().date() - timedelta(days=5)
        return datetime.fromtimestamp(image.post_date).date()

    def add(self, entry):
        """
        Adds an entry to the current session. I should likely refactor this.
        :param entry: the entry to add
        :return: None
        """
        self.session.add(entry)

    # TODO consider making queries managed with a with statement and auto
    # commit; a hedged sketch of that pattern follows the class below.

    def commit(self):
        """
        Commits the current session. I should likely refactor this.
        :return: None
        """
        self.session.commit()
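

# A minimal sketch of the with-statement idea from the TODO above. This is an
# assumption about how the refactor might look, not the current API: __exit__
# commits on success and rolls back on error, so callers no longer need to
# call commit() themselves.
class ManagedQueries(Queries):
    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if exc_type is None:
            self.session.commit()
        else:
            self.session.rollback()
        self.session.close()
        return False  # propagate any exception raised inside the block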
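

# Hedged usage sketch: Image's constructor arguments (url, post_date as a
# unix timestamp, soup_confidence) are assumptions inferred from the columns
# queried above, not confirmed against entities.py.
if __name__ == "__main__":
    queries = Queries()
    if not queries.image_exists("https://example.com/soup.jpg"):
        queries.add(Image(url="https://example.com/soup.jpg",
                          post_date=datetime.now().timestamp(),
                          soup_confidence=0.92))
        queries.commit()
    print("Most recent image date:", queries.most_recent_image_date())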