"""Backend for accessing data stored in a PostgreSQL database via psycopg2."""
import logging
import re
import warnings
from contextlib import contextmanager
from time import time
from psycopg2 import Error, ProgrammingError # pylint: disable=import-error
from psycopg2.pool import ThreadedConnectionPool # pylint: disable=import-error
from Orange.data import ContinuousVariable, DiscreteVariable, StringVariable, TimeVariable
from Orange.data.sql.backend.base import Backend, ToSql, BackendError
# Module-level logger used for query debug/timing output.
log = logging.getLogger(__name__)

# Optional Postgres extensions the backend attempts to enable when
# auto_create_extensions is set (used for time-based sampling and quantiles).
EXTENSIONS = ('tsm_system_time', 'quantile')
class Psycopg2Backend(Backend):
    """Backend for accessing data stored in a Postgres database
    """

    display_name = "PostgreSQL"
    # Class attribute acts only as a "not yet created" sentinel;
    # _create_connection_pool stores the real pool on the instance.
    connection_pool = None
    auto_create_extensions = False

    def __init__(self, connection_params):
        super().__init__(connection_params)

        if self.connection_pool is None:
            self._create_connection_pool()

        # Extensions that could not be installed (filled by _create_extensions).
        self.missing_extension = []
        if self.auto_create_extensions:
            self._create_extensions()

    def _create_connection_pool(self):
        """Create the thread-safe pool of 1-16 database connections.

        Raises:
            BackendError: if the database is unreachable or the connection
                parameters are invalid.
        """
        try:
            self.connection_pool = ThreadedConnectionPool(
                1, 16, **self.connection_params)
        except Error as ex:
            raise BackendError(str(ex)) from ex

    def _create_extensions(self):
        """Try to enable each optional extension; record the ones that fail."""
        for ext in EXTENSIONS:
            try:
                query = "CREATE EXTENSION IF NOT EXISTS {}".format(ext)
                with self.execute_sql_query(query):
                    pass
            except BackendError:
                warnings.warn("Database is missing extension {}".format(ext))
                self.missing_extension.append(ext)

    def create_sql_query(self, table_name, fields, filters=(),
                         group_by=None, order_by=None,
                         offset=None, limit=None,
                         use_time_sample=None):
        """Construct a SELECT statement from the given clauses.

        All arguments are interpolated verbatim, so identifiers must already
        be quoted (see quote_identifier) and values escaped by the caller.
        """
        sql = ["SELECT", ', '.join(fields),
               "FROM", table_name]
        if use_time_sample is not None:
            # Requires the tsm_system_time extension.
            sql.append("TABLESAMPLE system_time(%i)" % use_time_sample)
        if filters:
            sql.extend(["WHERE", " AND ".join(filters)])
        if group_by is not None:
            sql.extend(["GROUP BY", ", ".join(group_by)])
        if order_by is not None:
            sql.extend(["ORDER BY", ",".join(order_by)])
        if offset is not None:
            sql.extend(["OFFSET", str(offset)])
        if limit is not None:
            sql.extend(["LIMIT", str(limit)])
        return " ".join(sql)

    @contextmanager
    def execute_sql_query(self, query, params=None):
        """Context manager that yields a cursor with `query` executed.

        A connection is borrowed from the pool and returned on exit; the
        transaction is committed and the cursor closed in all cases.

        Raises:
            BackendError: wrapping any psycopg2 error raised by the query.
        """
        connection = self.connection_pool.getconn()
        cur = connection.cursor()
        try:
            # mogrify renders the query with params bound, for logging only.
            utfquery = cur.mogrify(query, params).decode('utf-8')
            log.debug("Executing: %s", utfquery)
            t = time()
            cur.execute(query, params)
            yield cur
            log.info("%.2f ms: %s", 1000 * (time() - t), utfquery)
        except (Error, ProgrammingError) as ex:
            raise BackendError(str(ex)) from ex
        finally:
            # Close the cursor explicitly so it does not linger on the pooled
            # connection until garbage collection.
            cur.close()
            connection.commit()
            self.connection_pool.putconn(connection)

    def quote_identifier(self, name):
        """Wrap `name` in double quotes for safe use as an SQL identifier."""
        return '"%s"' % name

    def unquote_identifier(self, quoted_name):
        """Strip one level of surrounding double quotes, if present."""
        if quoted_name.startswith('"'):
            return quoted_name[1:len(quoted_name) - 1]
        else:
            return quoted_name

    def list_tables_query(self, schema=None):
        """Return SQL listing visible tables/views as (schema, name) rows.

        Without a schema, visibility follows the connection's search path;
        system schemas and names starting with '__' are always excluded.
        """
        if schema:
            schema_clause = "AND n.nspname = '{}'".format(schema)
        else:
            schema_clause = "AND pg_catalog.pg_table_is_visible(c.oid)"
        return """SELECT n.nspname as "Schema",
                          c.relname AS "Name"
                       FROM pg_catalog.pg_class c
                  LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
                      WHERE c.relkind IN ('r','v','m','S','f','')
                        AND n.nspname <> 'pg_catalog'
                        AND n.nspname <> 'information_schema'
                        AND n.nspname !~ '^pg_toast'
                        {}
                        AND NOT c.relname LIKE '\\_\\_%'
                   ORDER BY 1,2;""".format(schema_clause)

    def n_tables_query(self, schema=None) -> str:
        """Return SQL counting tables, optionally restricted to `schema`.

        NOTE(review): `schema` is interpolated directly into the statement;
        callers must not pass untrusted input here.
        """
        query = "SELECT COUNT(*) FROM information_schema.tables"
        if schema:
            query += f" WHERE table_schema = '{schema}'"
        return query

    def create_variable(self, field_name, field_metadata,
                        type_hints, inspect_table=None):
        """Build an Orange variable for a column, attaching its SQL cast.

        A variable from `type_hints` wins over one guessed from the column's
        type code; `to_sql` converts the column to the value space Orange
        expects (epoch seconds, double precision, or text).
        """
        if field_name in type_hints:
            var = type_hints[field_name]
        else:
            var = self._guess_variable(field_name, field_metadata,
                                       inspect_table)

        field_name_q = self.quote_identifier(field_name)
        if var.is_continuous:
            if isinstance(var, TimeVariable):
                var.to_sql = ToSql("extract(epoch from {})"
                                   .format(field_name_q))
            else:
                var.to_sql = ToSql("({})::double precision"
                                   .format(field_name_q))
        else:  # discrete or string
            var.to_sql = ToSql("({})::text"
                               .format(field_name_q))
        return var

    def _guess_variable(self, field_name, field_metadata, inspect_table):
        """Map a Postgres type OID to an Orange variable type.

        Integer and character columns are probed for distinct values (when
        `inspect_table` is given) to decide between discrete and
        continuous/string variables.
        """
        type_code = field_metadata[0]

        FLOATISH_TYPES = (700, 701, 1700)  # real, float8, numeric
        INT_TYPES = (20, 21, 23)  # bigint, int, smallint
        CHAR_TYPES = (25, 1042, 1043,)  # text, char, varchar
        BOOLEAN_TYPES = (16,)  # bool
        DATE_TYPES = (1082, 1114, 1184, )  # date, timestamp, timestamptz
        # time, timestamp, timestamptz, timetz
        TIME_TYPES = (1083, 1114, 1184, 1266,)

        if type_code in FLOATISH_TYPES:
            return ContinuousVariable.make(field_name)

        if type_code in TIME_TYPES + DATE_TYPES:
            tv = TimeVariable.make(field_name)
            # timestamp/timestamptz (1114/1184) appear in both tuples,
            # so they set both flags.
            tv.have_date |= type_code in DATE_TYPES
            tv.have_time |= type_code in TIME_TYPES
            return tv

        if type_code in INT_TYPES:  # bigint, int, smallint
            if inspect_table:
                values = self.get_distinct_values(field_name, inspect_table)
                if values:
                    return DiscreteVariable.make(field_name, values)
            return ContinuousVariable.make(field_name)

        if type_code in BOOLEAN_TYPES:
            return DiscreteVariable.make(field_name, ['false', 'true'])

        if type_code in CHAR_TYPES:
            if inspect_table:
                values = self.get_distinct_values(field_name, inspect_table)
                # remove trailing spaces
                values = [v.rstrip() for v in values]
                if values:
                    return DiscreteVariable.make(field_name, values)

        return StringVariable.make(field_name)

    def count_approx(self, query):
        """Estimate the row count of `query` from the planner's EXPLAIN output."""
        sql = "EXPLAIN " + query
        with self.execute_sql_query(sql) as cur:
            s = ''.join(row[0] for row in cur.fetchall())
        return int(re.findall(r'rows=(\d*)', s)[0])

    def distinct_values_query(self, field_name: str, table_name: str) -> str:
        """Return SQL selecting up to 21 distinct, sorted values of a column."""
        fields = [self.quote_identifier(field_name)]
        return self.create_sql_query(
            table_name, fields, group_by=fields, order_by=fields, limit=21
        )

    def __getstate__(self):
        # Drop connection_pool from state as it cannot be pickled
        state = dict(self.__dict__)
        state.pop('connection_pool', None)
        return state

    def __setstate__(self, state):
        # Create a new connection pool if none exists
        self.__dict__.update(state)
        if self.connection_pool is None:
            self._create_connection_pool()