query (string, 9-60 chars) | language (1 class) | code (string, 105-25.7k chars) | url (string, 91-217 chars)
---|---|---|---|
postgresql connection
|
python
|
def _connect(self):
    """Establish connection to PostgreSQL Database."""
    if self._connParams:
        self._conn = psycopg2.connect(**self._connParams)
    else:
        self._conn = psycopg2.connect('')
    try:
        ver_str = self._conn.get_parameter_status('server_version')
    except AttributeError:
        ver_str = self.getParam('server_version')
    self._version = util.SoftwareVersion(ver_str)
|
https://github.com/aouyar/PyMunin/blob/4f58a64b6b37c85a84cc7e1e07aafaa0321b249d/pysysinfo/postgresql.py#L76-L86
|
postgresql connection
|
python
|
def _get_connection(self):
    """ Returns a connection to the postgres database.
    Returns:
        connection to the postgres database that stores mpr data.
    """
    if not getattr(self, '_connection', None):
        logger.debug(
            'Creating new connection.\n dsn: {}'
            .format(self._dsn))
        d = parse_url_to_dict(self._dsn)
        self._connection = psycopg2.connect(
            database=d['path'].strip('/'), user=d['username'], password=d['password'],
            port=d['port'], host=d['hostname'])
        # It took some time to work out how to get a raw connection from
        # sqlalchemy, so the commented code is left here for reference.
        #
        # self._engine = create_engine(self._dsn)
        # self._connection = self._engine.raw_connection()
        #
    return self._connection
|
https://github.com/CivicSpleen/ambry/blob/d7f2be4bf1f7ffd086f3fadd4fcae60c32473e42/ambry/mprlib/backends/postgresql.py#L197-L219
|
postgresql connection
|
python
|
def _connect(self):
"""Connect to PostgreSQL, either by reusing a connection from the pool
if possible, or by creating the new connection.
:rtype: psycopg2.extensions.connection
:raises: pool.NoIdleConnectionsError
"""
# Attempt to get a cached connection from the connection pool
try:
connection = self._pool_manager.get(self.pid, self)
LOGGER.debug("Re-using connection for %s", self.pid)
except pool.NoIdleConnectionsError:
if self._pool_manager.is_full(self.pid):
raise
# Create a new PostgreSQL connection
kwargs = utils.uri_to_kwargs(self._uri)
LOGGER.debug("Creating a new connection for %s", self.pid)
connection = self._psycopg2_connect(kwargs)
self._pool_manager.add(self.pid, connection)
self._pool_manager.lock(self.pid, connection, self)
# Added in because psycopg2ct connects and leaves the connection in
# a weird state: consts.STATUS_DATESTYLE, returning from
# Connection._setup without setting the state as const.STATUS_OK
if utils.PYPY:
connection.reset()
# Register the custom data types
self._register_unicode(connection)
self._register_uuid(connection)
return connection
|
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/session.py#L273-L307
|
postgresql connection
|
python
|
def connect(url=None, schema=None, sql_path=None, multiprocessing=False):
    """Open a new connection to postgres via psycopg2/sqlalchemy
    """
    if url is None:
        url = os.environ.get("DATABASE_URL")
    return Database(url, schema, sql_path=sql_path, multiprocessing=multiprocessing)
|
https://github.com/smnorris/pgdata/blob/8b0294024d5ef30b4ae9184888e2cc7004d1784e/pgdata/__init__.py#L15-L20
|
postgresql connection
|
python
|
def _connect(self):
"""Connect to PostgreSQL, either by reusing a connection from the pool
if possible, or by creating the new connection.
:rtype: psycopg2.extensions.connection
:raises: pool.NoIdleConnectionsError
"""
future = concurrent.Future()
# Attempt to get a cached connection from the connection pool
try:
connection = self._pool_manager.get(self.pid, self)
self._connections[connection.fileno()] = connection
future.set_result(connection)
# Add the connection to the IOLoop
self._ioloop.add_handler(connection.fileno(),
self._on_io_events,
ioloop.IOLoop.WRITE)
except pool.NoIdleConnectionsError:
self._create_connection(future)
return future
|
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/tornado_session.py#L244-L267
|
postgresql connection
|
python
|
def connection(self):
"""Get a steady, cached PostgreSQL connection from the pool."""
if self._connections:
if not self._connections.acquire(self._blocking):
raise TooManyConnections
try:
con = self._cache.get(0)
except Empty:
con = self.steady_connection()
return PooledPgConnection(self, con)
|
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/PooledPg.py#L205-L214
|
postgresql connection
|
python
|
def get_connection(engine, host, user, port, password, database, ssl={}):
""" Returns a PostgreSQL or MySQL connection """
if engine == 'mysql':
# Connection
return get_mysql_connection(host, user, port, password, database, ssl)
elif engine == 'postgresql':
# Connection
return get_pg_connection(host, user, port, password, database, ssl)
else:
raise RuntimeError('`%s` is not a valid engine.' % engine)
|
https://github.com/gabfl/dbschema/blob/37722e6654e9f0374fac5518ebdca22f4c39f92f/src/schema_change.py#L81-L91
|
postgresql connection
|
python
|
def get_pg_connection(host, user, port, password, database, ssl={}):
    """ PostgreSQL connection """
    return psycopg2.connect(host=host,
                            user=user,
                            port=port,
                            password=password,
                            dbname=database,
                            sslmode=ssl.get('sslmode', None),
                            sslcert=ssl.get('sslcert', None),
                            sslkey=ssl.get('sslkey', None),
                            sslrootcert=ssl.get('sslrootcert', None),
                            )
|
https://github.com/gabfl/dbschema/blob/37722e6654e9f0374fac5518ebdca22f4c39f92f/src/schema_change.py#L109-L121
|
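A minimal usage sketch for a helper shaped like get_pg_connection above, written against plain psycopg2; the host, credentials and certificate path are placeholders, not values from the source repository, and only the SSL keys the server actually requires need to be passed.
import psycopg2

# Placeholder SSL options; omit keys the server does not require.
ssl = {
    'sslmode': 'verify-full',
    'sslrootcert': '/etc/ssl/certs/pg-root.crt',
}

# Same keyword style as get_pg_connection above (placeholder credentials).
conn = psycopg2.connect(host='db.example.com',
                        user='app',
                        port=5432,
                        password='secret',
                        dbname='appdb',
                        sslmode=ssl.get('sslmode'),
                        sslrootcert=ssl.get('sslrootcert'))
conn.close()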
postgresql connection
|
python
|
def uri(host='localhost', port=5432, dbname='postgres', user='postgres',
        password=None):
    """Return a PostgreSQL connection URI for the specified values.
    :param str host: Host to connect to
    :param int port: Port to connect on
    :param str dbname: The database name
    :param str user: User to connect as
    :param str password: The password to use, None for no password
    :return str: The PostgreSQL connection URI
    """
    if port:
        host = '%s:%s' % (host, port)
    if password:
        return 'postgresql://%s:%s@%s/%s' % (user, password, host, dbname)
    return 'postgresql://%s@%s/%s' % (user, host, dbname)
|
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/utils.py#L82-L98
|
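Since libpq (and therefore psycopg2) accepts connection URIs directly, a URI built in the shape the uri() helper above produces can be handed straight to psycopg2.connect(); a minimal sketch with placeholder credentials:
import psycopg2

# URI in the same postgresql://user:password@host:port/dbname shape as uri() returns.
dsn = 'postgresql://app:secret@localhost:5432/appdb'

conn = psycopg2.connect(dsn)
print(conn.get_parameter_status('server_version'))
conn.close()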
postgresql connection
|
python
|
def _connect(self, database=None):
    """
    Connect to given database
    """
    conn_args = {
        'host': self.config['host'],
        'user': self.config['user'],
        'password': self.config['password'],
        'port': self.config['port'],
        'sslmode': self.config['sslmode'],
    }
    if database:
        conn_args['database'] = database
    else:
        conn_args['database'] = 'postgres'
    # libpq will use ~/.pgpass only if no password supplied
    if self.config['password_provider'] == 'pgpass':
        del conn_args['password']
    try:
        conn = psycopg2.connect(**conn_args)
    except Exception as e:
        self.log.error(e)
        raise e
    # Avoid using transactions, set isolation level to autocommit
    conn.set_isolation_level(0)
    return conn
|
https://github.com/python-diamond/Diamond/blob/0f3eb04327d6d3ed5e53a9967d6c9d2c09714a47/src/collectors/postgres/postgres.py#L150-L179
|
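The literal 0 passed to set_isolation_level() above is psycopg2's autocommit level; a short sketch of the same setting spelled with the named constant, assuming plain psycopg2 and placeholder connection parameters:
import psycopg2
import psycopg2.extensions

# Placeholder connection parameters.
conn = psycopg2.connect(host='localhost', user='app', password='secret',
                        dbname='postgres', sslmode='prefer')

# Equivalent to conn.set_isolation_level(0) in the collector above.
conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
conn.close()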
postgresql connection
|
python
|
def _connect(self):
"""Connect to the MySQL database.
"""
try:
db = pymysql.connect(user=self.user, passwd=self.passwd,
host=self.host, port=self.port,
db=self.shdb, use_unicode=True)
return db, db.cursor()
except Exception:
logger.error("Database connection error")
raise
|
https://github.com/chaoss/grimoirelab-elk/blob/64e08b324b36d9f6909bf705145d6451c8d34e65/grimoire_elk/enriched/database.py#L44-L55
|
postgresql connection
|
python
|
def connect(host=None, database=None, user=None, password=None, **kwargs):
    """Create a database connection."""
    host = host or os.environ['PGHOST']
    database = database or os.environ['PGDATABASE']
    user = user or os.environ['PGUSER']
    password = password or os.environ['PGPASSWORD']
    return psycopg2.connect(host=host,
                            database=database,
                            user=user,
                            password=password,
                            **kwargs)
|
https://github.com/portfoliome/postpy/blob/fe26199131b15295fc5f669a0ad2a7f47bf490ee/postpy/connections.py#L8-L20
|
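A sketch of the environment-variable fallback the connect() helper above relies on; the values are placeholders, and in practice they would come from the deployment environment rather than being set in code.
import os
import psycopg2

# Placeholder values standing in for real deployment configuration.
os.environ.setdefault('PGHOST', 'localhost')
os.environ.setdefault('PGDATABASE', 'appdb')
os.environ.setdefault('PGUSER', 'app')
os.environ.setdefault('PGPASSWORD', 'secret')

# Explicit arguments win; the PG* variables fill in whatever is missing.
conn = psycopg2.connect(host=os.environ['PGHOST'],
                        database=os.environ['PGDATABASE'],
                        user=os.environ['PGUSER'],
                        password=os.environ['PGPASSWORD'])
conn.close()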
postgresql connection
|
python
|
def get_pg_info():
"""Check PostgreSQL connection."""
from psycopg2 import connect, OperationalError
log.debug("entered get_pg_info")
try:
conf = settings.DATABASES['default']
database = conf["NAME"]
user = conf["USER"]
host = conf["HOST"]
port = conf["PORT"]
password = conf["PASSWORD"]
except (AttributeError, KeyError):
log.error("No PostgreSQL connection info found in settings.")
return {"status": NO_CONFIG}
except TypeError:
return {"status": DOWN}
log.debug("got past getting conf")
try:
start = datetime.now()
connection = connect(
database=database, user=user, host=host,
port=port, password=password, connect_timeout=TIMEOUT_SECONDS,
)
log.debug("at end of context manager")
micro = (datetime.now() - start).microseconds
connection.close()
except (OperationalError, KeyError) as ex:
log.error("No PostgreSQL connection info found in settings. %s Error: %s",
conf, ex)
return {"status": DOWN}
log.debug("got to end of postgres check successfully")
return {"status": UP, "response_microseconds": micro}
|
https://github.com/mitodl/django-server-status/blob/99bd29343138f94a08718fdbd9285e551751777b/server_status/views.py#L30-L61
|
postgresql connection
|
python
|
def connections(self, name):
"""Returns a list of existing connections to the named database."""
stmt = """
select {fields} from pg_stat_activity
where datname = {datname!r} and pid <> pg_backend_pid()
""".format(fields=', '.join(CONNECTION_FIELDS), datname=name)
return list(Connection(**x) for x in self._iter_results(stmt))
|
https://github.com/drkjam/pydba/blob/986c4b1315d6b128947c3bc3494513d8e5380ff0/pydba/postgres.py#L153-L159
|
postgresql connection
|
python
|
def fetch_query_from_pgdb(self, qname, query, con, cxn, limit=None, force=False):
"""
Supply either an already established connection, or connection parameters.
The supplied connection will override any separate cxn parameter
:param qname: The name of the query to save the output to
:param query: The SQL query itself
:param con: The already-established connection
:param cxn: The postgres connection information
:param limit: If you only want a subset of rows from the query
:return:
"""
if con is None and cxn is None:
LOG.error("ERROR: you need to supply connection information")
return
if con is None and cxn is not None:
con = psycopg2.connect(
host=cxn['host'], database=cxn['database'], port=cxn['port'],
user=cxn['user'], password=cxn['password'])
outfile = '/'.join((self.rawdir, qname))
cur = con.cursor()
# wrap the query to get the count
countquery = ' '.join(("SELECT COUNT(*) FROM (", query, ") x"))
if limit is not None:
countquery = ' '.join((countquery, "LIMIT", str(limit)))
# check local copy.
# assume that if the row counts are the same, the table is the same
# TEC - opinion:
# the only safe assumption is that if the counts are different,
# the data cannot be the same.
# e.g. for MGI, the dbinfo table has a single row that changes;
# to check whether tables really match, sort & compare digests.
filerowcount = -1
tablerowcount = -1
if not force:
if os.path.exists(outfile):
# get rows in the file
filerowcount = self.file_len(outfile)
LOG.info("INFO: rows in local file: %s", filerowcount)
# get rows in the table
# tablerowcount=cur.rowcount
cur.execute(countquery)
tablerowcount = cur.fetchone()[0]
# rowcount-1 because there's a header
if force or filerowcount < 0 or (filerowcount-1) != tablerowcount:
if force:
LOG.info("Forcing download of %s", qname)
else:
LOG.info(
"%s local (%s) different from remote (%s); fetching.",
qname, filerowcount, tablerowcount)
# download the file
LOG.debug("COMMAND:%s", query)
outputquery = """
COPY ({0}) TO STDOUT WITH DELIMITER AS '\t' CSV HEADER""".format(query)
with open(outfile, 'w') as f:
cur.copy_expert(outputquery, f)
# Regenerate row count to check integrity
filerowcount = self.file_len(outfile)
if (filerowcount-1) < tablerowcount:
raise Exception(
"Download from %s failed, %s != %s", cxn['host'] + ':' +
cxn['database'], (filerowcount-1), tablerowcount)
elif (filerowcount-1) > tablerowcount:
LOG.warning(
"Fetched from %s more rows in file (%s) than reported in count(%s)",
cxn['host'] + ':'+cxn['database'], (filerowcount-1), tablerowcount)
else:
LOG.info("local data same as remote; reusing.")
return
|
https://github.com/monarch-initiative/dipper/blob/24cc80db355bbe15776edc5c7b41e0886959ba41/dipper/sources/PostgreSQLSource.py#L113-L187
|
postgresql connection
|
python
|
def connect(db_url=None,
pooling=hgvs.global_config.uta.pooling,
application_name=None,
mode=None,
cache=None):
"""Connect to a uta/ncbi database instance.
:param db_url: URL for database connection
:type db_url: string
:param pooling: whether to use connection pooling (postgresql only)
:type pooling: bool
:param application_name: log application name in connection (useful for debugging; PostgreSQL only)
:type application_name: str
When called with an explicit db_url argument, that db_url is used for connecting.
When called without an explicit argument, the function default is
determined by the environment variable UTA_DB_URL if it exists, or
hgvs.datainterface.uta.public_db_url otherwise.
>>> hdp = connect()
>>> hdp.schema_version()
'1.1'
The format of the db_url is driver://user:pass@host/database (the same
as that used by SQLAlchemy). Examples:
A remote public postgresql database:
postgresql://anonymous:anonymous@uta.biocommons.org/uta'
A local postgresql database:
postgresql://localhost/uta
A local SQLite database:
sqlite:////tmp/uta-0.0.6.db
For postgresql db_urls, pooling=True causes connect to use a
psycopg2.pool.ThreadedConnectionPool.
"""
_logger.debug('connecting to ' + str(db_url) + '...')
if db_url is None:
db_url = _get_ncbi_db_url()
url = _parse_url(db_url)
if url.scheme == 'postgresql':
conn = NCBI_postgresql(
url=url, pooling=pooling, application_name=application_name, mode=mode, cache=cache)
else:
# fell through connection scheme cases
raise RuntimeError("{url.scheme} in {url} is not currently supported".format(url=url))
_logger.info('connected to ' + str(db_url) + '...')
return conn
|
https://github.com/biocommons/hgvs/blob/4d16efb475e1802b2531a2f1c373e8819d8e533b/hgvs/dataproviders/ncbi.py#L55-L108
|
postgresql connection
|
python
|
def _get_conn():
'''
Return a postgres connection.
'''
try:
conn = psycopg2.connect(
host=__opts__['master_job_cache.postgres.host'],
user=__opts__['master_job_cache.postgres.user'],
password=__opts__['master_job_cache.postgres.passwd'],
database=__opts__['master_job_cache.postgres.db'],
port=__opts__['master_job_cache.postgres.port'])
except psycopg2.OperationalError:
log.error('Could not connect to SQL server: %s', sys.exc_info()[0])
return None
return conn
|
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/returners/postgres_local_cache.py#L139-L153
|
postgresql connection
|
python
|
def connection(self):
"""Attempts to connect to the MySQL server.
:return: Bound MySQL connection object if successful or ``None`` if
unsuccessful.
"""
ctx = _app_ctx_stack.top
if ctx is not None:
if not hasattr(ctx, 'mysql_db'):
ctx.mysql_db = self.connect
return ctx.mysql_db
|
https://github.com/admiralobvious/flask-mysqldb/blob/418c794e9b031addd026f29312865403baea55a0/flask_mysqldb/__init__.py#L84-L95
|
postgresql connection
|
python
|
def connect(cls, database: str, user: str, password: str, host: str, port: int, *, use_pool: bool=True,
enable_ssl: bool=False, minsize=1, maxsize=50, keepalives_idle=5, keepalives_interval=4, echo=False,
**kwargs):
"""
Sets connection parameters
For more information on the parameters it accepts,
see : http://www.postgresql.org/docs/9.2/static/libpq-connect.html
"""
cls._connection_params['database'] = database
cls._connection_params['user'] = user
cls._connection_params['password'] = password
cls._connection_params['host'] = host
cls._connection_params['port'] = port
cls._connection_params['sslmode'] = 'prefer' if enable_ssl else 'disable'
cls._connection_params['minsize'] = minsize
cls._connection_params['maxsize'] = maxsize
cls._connection_params['keepalives_idle'] = keepalives_idle
cls._connection_params['keepalives_interval'] = keepalives_interval
cls._connection_params['echo'] = echo
cls._connection_params.update(kwargs)
cls._use_pool = use_pool
|
https://github.com/nerandell/cauldron/blob/d363bac763781bb2da18debfa0fdd4be28288b92/cauldron/sql.py#L130-L150
|
postgresql connection
|
python
|
def steady_connection(self):
"""Get a steady, unpooled PostgreSQL connection."""
return SteadyPgConnection(self._maxusage, self._setsession, True,
*self._args, **self._kwargs)
|
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/PooledPg.py#L200-L203
|
postgresql connection
|
python
|
def _connection(username=None, password=None, host=None, port=None):
"returns a connected cursor to the database-server."
c_opts = {}
if username: c_opts['user'] = username
if password: c_opts['passwd'] = password
if host: c_opts['host'] = host
if port: c_opts['port'] = port
dbc = MySQLdb.connect(**c_opts)
dbc.autocommit(True)
return dbc
|
https://github.com/bmaeser/pyque/blob/856dceab8d89cf3771cf21e682466c29a85ae8eb/pyque/db/mysql.py#L37-L49
|
postgresql connection
|
python
|
def _connection(username=None, password=None, host=None, port=None, db=None):
"returns a connected cursor to the database-server."
c_opts = {}
if username: c_opts['user'] = username
if password: c_opts['password'] = password
if host: c_opts['host'] = host
if port: c_opts['port'] = port
if db: c_opts['database'] = db
dbc = psycopg2.connect(**c_opts)
dbc.autocommit = True
return dbc
|
https://github.com/bmaeser/pyque/blob/856dceab8d89cf3771cf21e682466c29a85ae8eb/pyque/db/postgresql.py#L55-L68
|
postgresql connection
|
python
|
def connect(self, host='127.0.0.1', port=3306, user='root', password='', database=None):
""" Connect to the database specified """
if database is None:
raise exceptions.RequiresDatabase()
self._db_args = { 'host': host, 'port': port, 'user': user, 'password': password, 'database': database }
with self._db_conn() as conn:
conn.query('SELECT 1')
return self
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/sql_utility.py#L13-L22
|
postgresql connection
|
python
|
def connection_url(self, name=None):
"""
Provides a connection string for database as a sqlalchemy compatible URL.
NB - this doesn't include special arguments related to SSL connectivity (which are outside the scope
of the connection URL format).
Parameters
----------
name: str, optional
an override database name for the connection string.
Returns
-------
str: the connection URL (e.g. postgresql://user1@localhost:5432/db1)
"""
return 'postgresql://{user}@{host}:{port}/{dbname}'.format(**{k: v for k, v in self._connect_options(name)})
|
https://github.com/drkjam/pydba/blob/986c4b1315d6b128947c3bc3494513d8e5380ff0/pydba/postgres.py#L258-L274
|
postgresql connection
|
python
|
def connect(self):
"""Initialize the database connection."""
self._client = self._create_client()
self._db = getattr(self._client, self._db_name)
self._generic_dao = GenericDAO(self._client, self._db_name)
|
https://github.com/chovanecm/sacredboard/blob/47e1c99e3be3c1b099d3772bc077f5666020eb0b/sacredboard/app/data/pymongo/mongodb.py#L44-L48
|
postgresql connection
|
python
|
def _connect(self, config):
"""Establish a connection with a MySQL database."""
if 'connection_timeout' not in self._config:
self._config['connection_timeout'] = 480
try:
self._cnx = connect(**config)
self._cursor = self._cnx.cursor()
self._printer('\tMySQL DB connection established with db', config['database'])
except Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print("Something is wrong with your user name or password")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
print("Database does not exist")
raise err
|
https://github.com/mrstephenneal/mysql-toolkit/blob/6964f718f4b72eb30f2259adfcfaf3090526c53d/mysql/toolkit/components/connector.py#L74-L87
|
postgresql connection
|
python
|
def connect(self):
"""
Create a SQL Server connection and return a connection object
"""
connection = _mssql.connect(user=self.user,
password=self.password,
server=self.host,
port=self.port,
database=self.database)
return connection
|
https://github.com/spotify/luigi/blob/c5eca1c3c3ee2a7eb612486192a0da146710a1e9/luigi/contrib/mssqldb.py#L119-L128
|
postgresql connection
|
python
|
def _create_connection(self, future):
"""Create a new PostgreSQL connection
:param tornado.concurrent.Future future: future for new conn result
"""
LOGGER.debug('Creating a new connection for %s', self.pid)
# Create a new PostgreSQL connection
kwargs = utils.uri_to_kwargs(self._uri)
try:
connection = self._psycopg2_connect(kwargs)
except (psycopg2.Error, OSError, socket.error) as error:
future.set_exception(error)
return
# Add the connection for use in _poll_connection
fd = connection.fileno()
self._connections[fd] = connection
def on_connected(cf):
"""Invoked by the IOLoop when the future is complete for the
connection
:param Future cf: The future for the initial connection
"""
if cf.exception():
self._cleanup_fd(fd, True)
future.set_exception(cf.exception())
else:
try:
# Add the connection to the pool
LOGGER.debug('Connection established for %s', self.pid)
self._pool_manager.add(self.pid, connection)
except (ValueError, pool.PoolException) as err:
LOGGER.exception('Failed to add %r to the pool', self.pid)
self._cleanup_fd(fd)
future.set_exception(err)
return
self._pool_manager.lock(self.pid, connection, self)
# Added in because psycopg2cffi connects and leaves the
# connection in a weird state: consts.STATUS_DATESTYLE,
# returning from Connection._setup without setting the state
# as const.STATUS_OK
if utils.PYPY:
connection.status = extensions.STATUS_READY
# Register the custom data types
self._register_unicode(connection)
self._register_uuid(connection)
# Set the future result
future.set_result(connection)
# Add a future that fires once connected
self._futures[fd] = concurrent.Future()
self._ioloop.add_future(self._futures[fd], on_connected)
# Add the connection to the IOLoop
self._ioloop.add_handler(connection.fileno(),
self._on_io_events,
ioloop.IOLoop.WRITE)
|
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/tornado_session.py#L269-L336
|
postgresql connection
|
python
|
def connect_database(url):
"""
create database object by url
mysql:
mysql+type://user:passwd@host:port/database
sqlite:
# relative path
sqlite+type:///path/to/database.db
# absolute path
sqlite+type:////path/to/database.db
# memory database
sqlite+type://
mongodb:
mongodb+type://[username:password@]host1[:port1][,host2[:port2],...[,hostN[:portN]]][/[database][?options]]
more: http://docs.mongodb.org/manual/reference/connection-string/
sqlalchemy:
sqlalchemy+postgresql+type://user:passwd@host:port/database
sqlalchemy+mysql+mysqlconnector+type://user:passwd@host:port/database
more: http://docs.sqlalchemy.org/en/rel_0_9/core/engines.html
redis:
redis+taskdb://host:port/db
elasticsearch:
elasticsearch+type://host:port/?index=pyspider
local:
local+projectdb://filepath,filepath
type:
taskdb
projectdb
resultdb
"""
db = _connect_database(url)
db.copy = lambda: _connect_database(url)
return db
|
https://github.com/binux/pyspider/blob/3fccfabe2b057b7a56d4a4c79dc0dd6cd2239fe9/pyspider/database/__init__.py#L11-L46
|
postgresql connection
|
python
|
def connection(self):
"""
Get a connection to the database or raise an exception
"""
connection = self._get_connection()
if connection:
return connection
else:
message = "GTF database needs to be created"
if self.install_string:
message += ", run: %s" % self.install_string
raise ValueError(message)
|
https://github.com/openvax/pyensembl/blob/4b995fb72e848206d6fbf11950cf30964cd9b3aa/pyensembl/database.py#L271-L282
|
postgresql connection
|
python
|
def connect(self, db_uri, debug=False):
"""Configure connection to a SQL database.
Args:
db_uri (str): path/URI to the database to connect to
debug (Optional[bool]): whether to output logging information
"""
kwargs = {'echo': debug, 'convert_unicode': True}
# connect to the SQL database
if 'mysql' in db_uri:
kwargs['pool_recycle'] = 3600
elif '://' not in db_uri:
logger.debug("detected sqlite path URI: {}".format(db_uri))
db_path = os.path.abspath(os.path.expanduser(db_uri))
db_uri = "sqlite:///{}".format(db_path)
self.engine = create_engine(db_uri, **kwargs)
logger.debug('connection established successfully')
# make sure the same engine is propagated to the BASE classes
BASE.metadata.bind = self.engine
# start a session
self.session = scoped_session(sessionmaker(bind=self.engine))
# shortcut to query method
self.query = self.session.query
return self
|
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/plugins/sql/store.py#L62-L86
|
postgresql connection
|
python
|
def __connect(self):
"""
Connect to the database.
"""
self.__methods = _get_methods_by_uri(self.sqluri)
uri_connect_method = self.__methods[METHOD_CONNECT]
self.__dbapi2_conn = uri_connect_method(self.sqluri)
|
https://github.com/decryptus/sonicprobe/blob/72f73f3a40d2982d79ad68686e36aa31d94b76f8/sonicprobe/libs/anysql.py#L360-L367
|
postgresql connection
|
python
|
def connect(db='', **kwargs):
    """
    Connect to the database with the common connection arguments filled in;
    only the schema needs to be supplied.
    Returns a db connection object, but note that when used in a ``with``
    statement it yields a cursor (that is how MySQLdb implements it).
    ex1)
        import snipy.database as db
        conn = db.connect('my_db')
        cursor = conn.cursor()
    ex2)
        import snipy.database as db
        with db.connect('my_db') as cursor:
            cursor.execute(query)
    :param db: str: db schema
    :param kwargs: additional connection options
    :return: connection or cursor
    """
    arg = db_config(db)
    arg.update(kwargs)
    return MySQLdb.connect(**arg)
|
https://github.com/dade-ai/snipy/blob/408520867179f99b3158b57520e2619f3fecd69b/snipy/database.py#L41-L62
|
postgresql connection
|
python
|
def _connect(self):
"""Establish connection to MySQL Database."""
if self._connParams:
self._conn = MySQLdb.connect(**self._connParams)
else:
self._conn = MySQLdb.connect('')
|
https://github.com/aouyar/PyMunin/blob/4f58a64b6b37c85a84cc7e1e07aafaa0321b249d/pysysinfo/mysql.py#L64-L69
|
postgresql connection
|
python
|
def close(self):
""" Closes connection to database. """
if getattr(self, '_connection', None):
logger.debug('Closing postgresql connection.')
self._connection.close()
self._connection = None
if getattr(self, '_engine', None):
self._engine.dispose()
|
https://github.com/CivicSpleen/ambry/blob/d7f2be4bf1f7ffd086f3fadd4fcae60c32473e42/ambry/mprlib/backends/postgresql.py#L104-L111
|
postgresql connection
|
python
|
def connect(self, dsn):
    """Connect to DB.
    dbname: the database name
    user: user name used to authenticate
    password: password used to authenticate
    host: database host address (defaults to UNIX socket if not provided)
    port: connection port number (defaults to 5432 if not provided)
    """
    self.con = psycopg2.connect(dsn)
    self.cur = self.con.cursor(cursor_factory=psycopg2.extras.DictCursor)
    # autocommit: Disable automatic transactions
    self.con.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
|
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/gmn/src/d1_gmn/app/management/commands/util/util.py#L114-L126
|
postgresql connection
|
python
|
def _connect_db(self):
""" Open database connection
"""
# Get database configuration
db_args = {}
db_args['host'] = self._cfg.get('nipapd', 'db_host')
db_args['database'] = self._cfg.get('nipapd', 'db_name')
db_args['user'] = self._cfg.get('nipapd', 'db_user')
db_args['password'] = self._cfg.get('nipapd', 'db_pass')
db_args['sslmode'] = self._cfg.get('nipapd', 'db_sslmode')
db_args['port'] = self._cfg.get('nipapd', 'db_port')
# delete keys that are None, for example if we want to connect over a
# UNIX socket, the 'host' argument should not be passed into the DSN
if db_args['host'] is not None and db_args['host'] == '':
db_args['host'] = None
for key in db_args.copy():
if db_args[key] is None:
del(db_args[key])
# Create database connection
while True:
try:
self._con_pg = psycopg2.connect(**db_args)
self._con_pg.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
self._curs_pg = self._con_pg.cursor(cursor_factory=psycopg2.extras.DictCursor)
self._register_inet()
psycopg2.extras.register_hstore(self._con_pg, globally=True, unicode=True)
except psycopg2.Error as exc:
if re.search("database.*does not exist", unicode(exc)):
raise NipapDatabaseNonExistentError("Database '%s' does not exist" % db_args['database'])
# no hstore extension, assume empty db (it wouldn't work
# otherwise) and do auto upgrade?
if re.search("hstore type not found in the database", unicode(exc)):
# automatically install if auto-install is enabled
if self._auto_install_db:
self._db_install(db_args['database'])
continue
raise NipapDatabaseMissingExtensionError("hstore extension not found in the database")
self._logger.error("pgsql: %s" % exc)
raise NipapError("Backend unable to connect to database")
except psycopg2.Warning as warn:
self._logger.warning('pgsql: %s' % warn)
# check db version
try:
current_db_version = self._get_db_version()
except NipapDatabaseNoVersionError as exc:
# if there's no db schema version we assume the database is
# empty...
if self._auto_install_db:
# automatically install schema?
self._db_install(db_args['database'])
continue
raise exc
except NipapError as exc:
self._logger.error(unicode(exc))
raise exc
if current_db_version != nipap.__db_version__:
if self._auto_upgrade_db:
self._db_upgrade(db_args['database'])
continue
raise NipapDatabaseWrongVersionError("NIPAP PostgreSQL database is outdated. Schema version %s is required to run but you are using %s" % (nipap.__db_version__, current_db_version))
# if we reach this we should be fine and done
break
|
https://github.com/SpriteLink/NIPAP/blob/f96069f11ab952d80b13cab06e0528f2d24b3de9/nipap/nipap/backend.py#L750-L817
|
postgresql connection
|
python
|
def connection(self):
"""Get a steady, persistent PyGreSQL connection."""
try:
con = self.thread.connection
except AttributeError:
con = self.steady_connection()
self.thread.connection = con
return con
|
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/PersistentPg.py#L166-L173
|
postgresql connection
|
python
|
def get_mysql_connection(host, user, port, password, database, ssl={}):
""" MySQL connection """
return pymysql.connect(host=host,
user=user,
port=port,
password=password,
db=database,
charset='utf8mb4',
cursorclass=pymysql.cursors.DictCursor,
client_flag=pymysql.constants.CLIENT.MULTI_STATEMENTS,
ssl=ssl
)
|
https://github.com/gabfl/dbschema/blob/37722e6654e9f0374fac5518ebdca22f4c39f92f/src/schema_change.py#L94-L106
|
postgresql connection
|
python
|
def connect(self):
"""
Get a psycopg2 connection object to the database where the table is.
"""
connection = psycopg2.connect(
host=self.host,
port=self.port,
database=self.database,
user=self.user,
password=self.password)
connection.set_client_encoding('utf-8')
return connection
|
https://github.com/spotify/luigi/blob/c5eca1c3c3ee2a7eb612486192a0da146710a1e9/luigi/contrib/postgres.py#L187-L198
|
postgresql connection
|
python
|
def set_mysql_connection(host='localhost', user='pyctd_user', password='pyctd_passwd', db='pyctd', charset='utf8'):
"""Sets the connection using MySQL Parameters"""
set_connection('mysql+pymysql://{user}:{passwd}@{host}/{db}?charset={charset}'.format(
host=host,
user=user,
passwd=password,
db=db,
charset=charset)
)
|
https://github.com/cebel/pyctd/blob/38ba02adaddb60cef031d3b75516773fe8a046b5/src/pyctd/manager/database.py#L454-L462
|
postgresql connection
|
python
|
def db_connect(connection_string=None, **kwargs):
"""Function to supply a database connection object."""
if connection_string is None:
connection_string = get_current_registry().settings[CONNECTION_STRING]
db_conn = psycopg2.connect(connection_string, **kwargs)
try:
with db_conn:
yield db_conn
finally:
db_conn.close()
|
https://github.com/openstax/cnx-publishing/blob/f55b4a2c45d8618737288f1b74b4139d5ac74154/cnxpublishing/db.py#L49-L58
|
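Because db_connect above yields the connection, it is presumably wrapped as a context manager elsewhere in the project; a self-contained sketch of that pattern with plain psycopg2 and contextlib (the DSN is a placeholder, and the decorator is an assumption rather than something shown in the snippet):
import contextlib
import psycopg2

@contextlib.contextmanager
def db_connect(connection_string, **kwargs):
    """Yield a psycopg2 connection; commit on clean exit, always close."""
    db_conn = psycopg2.connect(connection_string, **kwargs)
    try:
        with db_conn:  # the connection block commits on success, rolls back on error
            yield db_conn
    finally:
        db_conn.close()

# Placeholder DSN.
with db_connect('dbname=appdb user=app password=secret host=localhost') as conn:
    with conn.cursor() as cur:
        cur.execute('SELECT 1')
        print(cur.fetchone())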
postgresql connection
|
python
|
def get_connection(db_type, db_pth, user=None, password=None, name=None):
""" Get a connection to a SQL database. Can be used for SQLite, MySQL or Django MySQL database
Example:
>>> from msp2db.db import get_connection
>>> conn = get_connection('sqlite', 'library.db')
If using "mysql" mysql.connector needs to be installed.
If using "django_mysql" Django needs to be installed.
Args:
db_type (str): Type of database can either be "sqlite", "mysql" or "django_mysql"
Returns:
sql connection object
"""
if db_type == 'sqlite':
print(db_pth)
conn = sqlite3.connect(db_pth)
elif db_type == 'mysql':
import mysql.connector
conn = mysql.connector.connect(user=user, password=password, database=name)
elif db_type == 'django_mysql':
from django.db import connection as conn
else:
print('unsupported database type: {}, choices are "sqlite", "mysql" or "django_mysql"'.format(db_type))
return conn
|
https://github.com/computational-metabolomics/msp2db/blob/f86f01efca26fd2745547c9993f97337c6bef123/msp2db/db.py#L99-L129
|
postgresql connection
|
python
|
def pg_isready(self):
"""Runs pg_isready to see if PostgreSQL is accepting connections.
:returns: 'ok' if PostgreSQL is up, 'reject' if starting up, 'no_response' if not up."""
cmd = [self._pgcommand('pg_isready'), '-p', self._local_address['port'], '-d', self._database]
# Host is not set if we are connecting via default unix socket
if 'host' in self._local_address:
cmd.extend(['-h', self._local_address['host']])
# We only need the username because pg_isready does not try to authenticate
if 'username' in self._superuser:
cmd.extend(['-U', self._superuser['username']])
ret = subprocess.call(cmd)
return_codes = {0: STATE_RUNNING,
1: STATE_REJECT,
2: STATE_NO_RESPONSE,
3: STATE_UNKNOWN}
return return_codes.get(ret, STATE_UNKNOWN)
|
https://github.com/zalando/patroni/blob/f6d29081c90af52064b981cdd877a07338d86038/patroni/postgresql.py#L286-L306
|
postgresql connection
|
python
|
def connect(self):
"""connect to the database
**Return:**
- ``dbConn`` -- the database connection
See the class docstring for usage
"""
self.log.debug('starting the ``get`` method')
dbSettings = self.dbSettings
port = False
if "tunnel" in dbSettings and dbSettings["tunnel"]:
port = self._setup_tunnel(
tunnelParameters=dbSettings["tunnel"]
)
# SETUP A DATABASE CONNECTION
host = dbSettings["host"]
user = dbSettings["user"]
passwd = dbSettings["password"]
dbName = dbSettings["db"]
dbConn = ms.connect(
host=host,
user=user,
passwd=passwd,
db=dbName,
port=port,
use_unicode=True,
charset='utf8',
local_infile=1,
client_flag=ms.constants.CLIENT.MULTI_STATEMENTS,
connect_timeout=36000,
max_allowed_packet=51200000
)
if self.autocommit:
dbConn.autocommit(True)
self.log.debug('completed the ``get`` method')
return dbConn
|
https://github.com/thespacedoctor/fundamentals/blob/1d2c007ac74442ec2eabde771cfcacdb9c1ab382/fundamentals/mysql/database.py#L85-L125
|
postgresql connection
|
python
|
def connect_db(Repo, database=":memory:"):
"""
Connect Repo to a database with path +database+ so all instances can
interact with the database.
"""
Repo.db = sqlite3.connect(database,
detect_types=sqlite3.PARSE_DECLTYPES)
return Repo.db
|
https://github.com/ECESeniorDesign/lazy_record/blob/929d3cc7c2538b0f792365c0d2b0e0d41084c2dd/lazy_record/repo.py#L294-L301
|
postgresql connection
|
python
|
def connect(db_url=None,
pooling=hgvs.global_config.uta.pooling,
application_name=None,
mode=None,
cache=None):
"""Connect to a UTA database instance and return a UTA interface instance.
:param db_url: URL for database connection
:type db_url: string
:param pooling: whether to use connection pooling (postgresql only)
:type pooling: bool
:param application_name: log application name in connection (useful for debugging; PostgreSQL only)
:type application_name: str
When called with an explicit db_url argument, that db_url is used for connecting.
When called without an explicit argument, the function default is
determined by the environment variable UTA_DB_URL if it exists, or
hgvs.datainterface.uta.public_db_url otherwise.
>>> hdp = connect()
>>> hdp.schema_version()
'1.1'
The format of the db_url is driver://user:pass@host/database/schema (the same
as that used by SQLAlchemy). Examples:
A remote public postgresql database:
postgresql://anonymous:anonymous@uta.biocommons.org/uta/uta_20170707'
A local postgresql database:
postgresql://localhost/uta_dev/uta_20170707
For postgresql db_urls, pooling=True causes connect to use a
psycopg2.pool.ThreadedConnectionPool.
"""
_logger.debug('connecting to ' + str(db_url) + '...')
if db_url is None:
db_url = _get_uta_db_url()
url = _parse_url(db_url)
if url.scheme == 'sqlite':
conn = UTA_sqlite(url, mode, cache)
elif url.scheme == 'postgresql':
conn = UTA_postgresql(
url=url, pooling=pooling, application_name=application_name, mode=mode, cache=cache)
else:
# fell through connection scheme cases
raise RuntimeError("{url.scheme} in {url} is not currently supported".format(url=url))
_logger.info('connected to ' + str(db_url) + '...')
return conn
|
https://github.com/biocommons/hgvs/blob/4d16efb475e1802b2531a2f1c373e8819d8e533b/hgvs/dataproviders/uta.py#L62-L114
|
postgresql connection
|
python
|
def cache_connect(database=None):
"""Returns a connection object to a sqlite database.
Args:
database (str, optional): The path to the database the user wishes
to connect to. If not specified, a default is chosen using
:func:`.cache_file`. If the special database name ':memory:'
is given, then a temporary database is created in memory.
Returns:
:class:`sqlite3.Connection`
"""
if database is None:
database = cache_file()
if os.path.isfile(database):
# just connect to the database as-is
conn = sqlite3.connect(database)
else:
# we need to populate the database
conn = sqlite3.connect(database)
conn.executescript(schema)
with conn as cur:
# turn on foreign keys, allows deletes to cascade.
cur.execute("PRAGMA foreign_keys = ON;")
conn.row_factory = sqlite3.Row
return conn
|
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/system/cache/database_manager.py#L37-L67
|
postgresql connection
|
python
|
def connect(self, url: str):
"""Connect to the database and set it as main database
:param url: path to the database, uses the Sqlalchemy format
:type url: str
:example: ``ds.connect("sqlite:///mydb.slqite")``
"""
try:
self.db = dataset.connect(url, row_type=stuf)
except Exception as e:
self.err(e, "Can not connect to database")
return
if self.db is None:
self.err("Database " + url + " not found")
return
self.ok("Db", self.db.url, "connected")
|
https://github.com/synw/dataswim/blob/4a4a53f80daa7cd8e8409d76a19ce07296269da2/dataswim/db/__init__.py#L23-L39
|
postgresql connection
|
python
|
def _reconnect(self):
"""Closes the existing database connection and re-opens it."""
self.close()
self._db = psycopg2.connect(**self._db_args)
if self._search_path:
self.execute('set search_path=%s;' % self._search_path)
if self._timezone:
self.execute("set timezone='%s';" % self._timezone)
|
https://github.com/stevepeak/tornpsql/blob/a109d0f95d6432d0e3b5eba1c9854357ba527f27/tornpsql/__init__.py#L134-L143
|
postgresql connection
|
python
|
def set_mysql_connection(host='localhost', user='pyhgnc_user', passwd='pyhgnc_passwd', db='pyhgnc',
charset='utf8'):
"""Method to set a MySQL connection
:param str host: MySQL database host
:param str user: MySQL database user
:param str passwd: MySQL database password
:param str db: MySQL database name
:param str charset: MySQL database character set
:return: connection string
:rtype: str
"""
connection_string = 'mysql+pymysql://{user}:{passwd}@{host}/{db}?charset={charset}'.format(
host=host,
user=user,
passwd=passwd,
db=db,
charset=charset
)
set_connection(connection_string)
return connection_string
|
https://github.com/LeKono/pyhgnc/blob/1cae20c40874bfb51581b7c5c1481707e942b5d0/src/pyhgnc/manager/database.py#L467-L489
|
postgresql connection
|
python
|
def get_connection(db=DATABASE):
""" Returns a new connection to the database. """
return database.connect(host=HOST, port=PORT, user=USER, password=PASSWORD, database=db)
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/examples/multi_threaded_inserts.py#L30-L32
|
postgresql connection
|
python
|
def connect(dbapi_connection, connection_record):
"""
Called once by SQLAlchemy for each new SQLite DB-API connection.
Here is where we issue some PRAGMA statements to configure how we're
going to access the SQLite database.
@param dbapi_connection:
A newly connected raw SQLite DB-API connection.
@param connection_record:
Unused by this method.
"""
try:
cursor = dbapi_connection.cursor()
try:
cursor.execute("PRAGMA foreign_keys = ON;")
cursor.execute("PRAGMA foreign_keys;")
if cursor.fetchone()[0] != 1:
raise Exception()
finally:
cursor.close()
except Exception:
dbapi_connection.close()
raise sqlite3.Error()
|
https://github.com/fabioz/PyDev.Debugger/blob/ed9c4307662a5593b8a7f1f3389ecd0e79b8c503/pydevd_attach_to_process/winappdbg/sql.py#L101-L125
|
postgresql connection
|
python
|
def connection_dsn(self, name=None):
    """
    Provides a connection string for database.
    Parameters
    ----------
    name: str, optional
        an override database name for the connection string.
    Returns
    -------
    str: the connection string (e.g. 'dbname=db1 user=user1 host=localhost port=5432')
    """
    return ' '.join("%s=%s" % (param, value) for param, value in self._connect_options(name))
|
https://github.com/drkjam/pydba/blob/986c4b1315d6b128947c3bc3494513d8e5380ff0/pydba/postgres.py#L243-L256
|
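The key=value string produced by connection_dsn above is the libpq DSN format, which psycopg2.connect() accepts as-is; a minimal sketch with placeholder values:
import psycopg2

# DSN in the key=value form connection_dsn returns (placeholder values).
dsn = 'dbname=appdb user=app host=localhost port=5432'

conn = psycopg2.connect(dsn)
with conn.cursor() as cur:
    cur.execute('SELECT current_database()')
    print(cur.fetchone()[0])
conn.close()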
postgresql connection
|
python
|
def _connect(self):
"""
Connect to the MySQL server
"""
self._close()
self.conn = MySQLdb.Connect(host=self.hostname,
port=self.port,
user=self.username,
passwd=self.password,
db=self.database)
|
https://github.com/python-diamond/Diamond/blob/0f3eb04327d6d3ed5e53a9967d6c9d2c09714a47/src/diamond/handler/mysql.py#L92-L101
|
postgresql connection
|
python
|
def _connection_defaults(user=None, host=None, port=None, maintenance_db=None):
'''
Returns a tuple of (user, host, port, db) with config, pillar, or default
values assigned to missing values.
'''
if not user:
user = __salt__['config.option']('postgres.user')
if not host:
host = __salt__['config.option']('postgres.host')
if not port:
port = __salt__['config.option']('postgres.port')
if not maintenance_db:
maintenance_db = __salt__['config.option']('postgres.maintenance_db')
return (user, host, port, maintenance_db)
|
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/postgres.py#L344-L358
|
postgresql connection
|
python
|
def connect(self):
"""
Connects to the database server.
"""
with warnings.catch_warnings():
warnings.filterwarnings('ignore', '.*deprecated.*')
self._conn = client.connect(
init_command=self.init_fun,
sql_mode="NO_ZERO_DATE,NO_ZERO_IN_DATE,ERROR_FOR_DIVISION_BY_ZERO,"
"STRICT_ALL_TABLES,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION",
charset=config['connection.charset'],
**self.conn_info)
self._conn.autocommit(True)
|
https://github.com/datajoint/datajoint-python/blob/4f29bb154a7ed2b8b64b4d3a9c8be4c16b39621c/datajoint/connection.py#L88-L100
|
postgresql connection
|
python
|
def _get_db_connect(dbSystem,db,user,password):
"""Create a connection to the database specified on the command line
"""
if dbSystem=='SYBASE':
import Sybase
try:
dbh = Sybase.connect(dbSystem,
user,
password,
database=db )
except:
dbh=None
elif dbSystem=='MYSQL':
import MySQLdb
try:
dbh = MySQLdb.connect(user=user,
passwd=password,
db=db ,
host='gimli')
except:
dbh=None
return dbh
|
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/webCat/MOPdbaccess.py#L73-L95
|
postgresql connection
|
python
|
def connection(self, name=None):
"""
Get a database connection instance
:param name: The connection name
:type name: str
:return: A Connection instance
:rtype: orator.connections.connection.Connection
"""
name, type = self._parse_connection_name(name)
if name not in self._connections:
logger.debug("Initiating connection %s" % name)
connection = self._make_connection(name)
self._set_connection_for_type(connection, type)
self._connections[name] = self._prepare(connection)
return self._connections[name]
|
https://github.com/sdispater/orator/blob/bd90bf198ee897751848f9a92e49d18e60a74136/orator/database_manager.py#L28-L48
|
postgresql connection
|
python
|
def _connect(self):
"""Try to create a connection to the database if not yet connected.
"""
if self._connection is not None:
raise RuntimeError('Close connection first.')
self._connection = connect(self._database, **self._kwds)
self._connection.isolation_level = None
|
https://github.com/merry-bits/DBQuery/blob/5f46dc94e2721129f8a799b5f613373e6cd9cb73/src/dbquery/sqlite.py#L27-L33
|
postgresql connection
|
python
|
def connect(backend=None, host=None, port=None, name=None, max_tries=None,
connection_timeout=None, replicaset=None, ssl=None, login=None, password=None,
ca_cert=None, certfile=None, keyfile=None, keyfile_passphrase=None,
crlfile=None):
"""Create a new connection to the database backend.
All arguments default to the current configuration's values if not
given.
Args:
backend (str): the name of the backend to use.
host (str): the host to connect to.
port (int): the port to connect to.
name (str): the name of the database to use.
replicaset (str): the name of the replica set (only relevant for
MongoDB connections).
Returns:
An instance of :class:`~bigchaindb.backend.connection.Connection`
based on the given (or defaulted) :attr:`backend`.
Raises:
:exc:`~ConnectionError`: If the connection to the database fails.
:exc:`~ConfigurationError`: If the given (or defaulted) :attr:`backend`
is not supported or could not be loaded.
:exc:`~AuthenticationError`: If there is a OperationFailure due to
Authentication failure after connecting to the database.
"""
backend = backend or bigchaindb.config['database']['backend']
host = host or bigchaindb.config['database']['host']
port = port or bigchaindb.config['database']['port']
dbname = name or bigchaindb.config['database']['name']
# Not sure how to handle this here. This setting is only relevant for
# mongodb.
# I added **kwargs for both RethinkDBConnection and MongoDBConnection
# to handle these these additional args. In case of RethinkDBConnection
# it just does not do anything with it.
#
# UPD: RethinkDBConnection is not here anymore cause we no longer support RethinkDB.
# The problem described above might be reconsidered next time we introduce a backend,
# if it ever happens.
replicaset = replicaset or bigchaindb.config['database'].get('replicaset')
ssl = ssl if ssl is not None else bigchaindb.config['database'].get('ssl', False)
login = login or bigchaindb.config['database'].get('login')
password = password or bigchaindb.config['database'].get('password')
ca_cert = ca_cert or bigchaindb.config['database'].get('ca_cert', None)
certfile = certfile or bigchaindb.config['database'].get('certfile', None)
keyfile = keyfile or bigchaindb.config['database'].get('keyfile', None)
keyfile_passphrase = keyfile_passphrase or bigchaindb.config['database'].get('keyfile_passphrase', None)
crlfile = crlfile or bigchaindb.config['database'].get('crlfile', None)
try:
module_name, _, class_name = BACKENDS[backend].rpartition('.')
Class = getattr(import_module(module_name), class_name)
except KeyError:
raise ConfigurationError('Backend `{}` is not supported. '
'BigchainDB currently supports {}'.format(backend, BACKENDS.keys()))
except (ImportError, AttributeError) as exc:
raise ConfigurationError('Error loading backend `{}`'.format(backend)) from exc
logger.debug('Connection: {}'.format(Class))
return Class(host=host, port=port, dbname=dbname,
max_tries=max_tries, connection_timeout=connection_timeout,
replicaset=replicaset, ssl=ssl, login=login, password=password,
ca_cert=ca_cert, certfile=certfile, keyfile=keyfile,
keyfile_passphrase=keyfile_passphrase, crlfile=crlfile)
|
https://github.com/bigchaindb/bigchaindb/blob/835fdfcf598918f76139e3b88ee33dd157acaaa7/bigchaindb/backend/connection.py#L21-L87
|
postgresql connection
|
python
|
def init_db_conn(connection_name, connection_string, scopefunc=None):
"""
Initialize a postgresql connection by each connection string
defined in the configuration file
"""
engine = create_engine(connection_string)
session = scoped_session(sessionmaker(), scopefunc=scopefunc)
session.configure(bind=engine)
pool.connections[connection_name] = Connection(engine, session)
|
https://github.com/Riffstation/flask-philo/blob/76c9d562edb4a77010c8da6dfdb6489fa29cbc9e/flask_philo/db/postgresql/connection.py#L47-L55
|
postgresql connection
|
python
|
def _configure_connection(self, name, value):
"""Sets a Postgres run-time connection configuration parameter.
:param name: the name of the parameter
:param value: a list of values matching the placeholders
"""
self.update("pg_settings", dict(setting=value), dict(name=name))
|
https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/db/access/data_access.py#L87-L93
|
postgresql connection
|
python
|
def connect_mysql(host, port, user, password, database):
"""Connect to MySQL with retries."""
return pymysql.connect(
host=host, port=port,
user=user, passwd=password,
db=database
)
|
https://github.com/openstack/monasca-common/blob/61e2e00454734e2881611abec8df0d85bf7655ac/docker/mysql_check.py#L109-L115
|
postgresql connection
|
python
|
def connect(self):
"""Try to connect to the database.
Raises:
:exc:`~ConnectionError`: If the connection to the database
fails.
"""
attempt = 0
for i in self.max_tries_counter:
attempt += 1
try:
self._conn = self._connect()
except ConnectionError as exc:
logger.warning('Attempt %s/%s. Connection to %s:%s failed after %sms.',
attempt, self.max_tries if self.max_tries != 0 else '∞',
self.host, self.port, self.connection_timeout)
if attempt == self.max_tries:
logger.critical('Cannot connect to the Database. Giving up.')
raise ConnectionError() from exc
else:
break
|
https://github.com/bigchaindb/bigchaindb/blob/835fdfcf598918f76139e3b88ee33dd157acaaa7/bigchaindb/backend/connection.py#L148-L169
|
postgresql connection
|
python
|
def connect(
database: Union[str, Path], *, loop: asyncio.AbstractEventLoop = None, **kwargs: Any
) -> Connection:
"""Create and return a connection proxy to the sqlite database."""
if loop is None:
loop = asyncio.get_event_loop()
def connector() -> sqlite3.Connection:
if isinstance(database, str):
loc = database
elif isinstance(database, bytes):
loc = database.decode("utf-8")
else:
loc = str(database)
return sqlite3.connect(loc, **kwargs)
return Connection(connector, loop)
|
https://github.com/jreese/aiosqlite/blob/3f548b568b8db9a57022b6e2c9627f5cdefb983f/aiosqlite/core.py#L287-L304
|
postgresql connection
|
python
|
def get_connection(connection_details=None):
""" Creates a connection to the MySQL DB. """
if connection_details is None:
connection_details = get_default_connection_details()
return MySQLdb.connect(
connection_details['host'],
connection_details['user'],
connection_details['password'],
connection_details['database']
)
|
https://github.com/evetrivia/thanatos/blob/664c12a8ccf4d27ab0e06e0969bbb6381f74789c/thanatos/database/db_utils.py#L103-L114
|
postgresql connection
|
python
|
def connect_to_database_odbc_sqlserver(self,
odbc_connection_string: str = None,
dsn: str = None,
database: str = None,
user: str = None,
password: str = None,
server: str = "localhost",
driver: str = "{SQL Server}",
autocommit: bool = True) -> None:
"""Connects to an SQL Server database via ODBC."""
self.connect(engine=ENGINE_SQLSERVER, interface=INTERFACE_ODBC,
odbc_connection_string=odbc_connection_string,
dsn=dsn,
database=database, user=user, password=password,
host=server, driver=driver,
autocommit=autocommit)
|
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/rnc_db.py#L1983-L1998
|
postgresql connection
|
python
|
def parse_connection_string_libpq(connection_string):
"""parse a postgresql connection string as defined in
http://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-CONNSTRING"""
fields = {}
while True:
connection_string = connection_string.strip()
if not connection_string:
break
if "=" not in connection_string:
raise ValueError("expecting key=value format in connection_string fragment {!r}".format(connection_string))
key, rem = connection_string.split("=", 1)
if rem.startswith("'"):
asis, value = False, ""
for i in range(1, len(rem)):
if asis:
value += rem[i]
asis = False
elif rem[i] == "'":
break # end of entry
elif rem[i] == "\\":
asis = True
else:
value += rem[i]
else:
raise ValueError("invalid connection_string fragment {!r}".format(rem))
connection_string = rem[i + 1:] # pylint: disable=undefined-loop-variable
else:
res = rem.split(None, 1)
if len(res) > 1:
value, connection_string = res
else:
value, connection_string = rem, ""
fields[key] = value
return fields
|
https://github.com/aiven/pghoard/blob/2994165d4ef3ff7a5669a2527346bcbfb5b3bd8a/pghoard/pgutil.py#L67-L100
|
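For comparison, recent psycopg2 releases (2.7 and later) expose libpq's own parser, which handles the same key=value grammar, including quoting, that parse_connection_string_libpq above implements by hand; a minimal sketch with placeholder values:
from psycopg2.extensions import parse_dsn

# Same grammar as the hand-rolled parser above, quoting included (placeholder values).
fields = parse_dsn("host=localhost port=5432 dbname=appdb user=app password='s3 cret'")
print(fields['host'], fields['dbname'])

# The resulting dict can be splatted back into psycopg2.connect(**fields).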
postgresql connection
|
python
|
def reconnect(self):
"""Closes the existing database connection and re-opens it."""
conn = _mysql.connect(**self._db_args)
if conn is not None:
self.close()
self._db = conn
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/database.py#L95-L100
|
postgresql connection
|
python
|
def _connect(dbfile: 'PathLike') -> apsw.Connection:
"""Connect to SQLite database file."""
conn = apsw.Connection(os.fspath(dbfile))
_set_foreign_keys(conn, 1)
assert _get_foreign_keys(conn) == 1
return conn
|
https://github.com/darkfeline/animanager/blob/55d92e4cbdc12aac8ebe302420d2cff3fa9fa148/animanager/animecmd.py#L127-L132
|
postgresql connection
|
python
|
def execute(connection: connection, statement: str) -> Optional[List[Tuple[str, ...]]]:
"""Execute PGSQL statement and fetches the statement response.
Parameters
----------
connection: psycopg2.extensions.connection
Active connection to a PostGreSQL database.
statement: str
PGSQL statement to run against the database.
Returns
-------
response: list or None
List of tuples, where each tuple represents a formatted line of response from the database engine, where
each tuple item roughly corresponds to a column. For instance, while a raw SELECT response might include
the table headers, psycopg2 returns only the rows that matched. If no response was given, None is returned.
"""
response = list() # type: List
# See the following link for reasoning behind both with statements:
# http://initd.org/psycopg/docs/usage.html#with-statement
#
# Additionally, the with statement makes this library safer to use with
# higher-level libraries (e.g. SQLAlchemy) that don't inherently respect
# PostGreSQL's autocommit isolation-level, since the transaction is
# properly completed for each statement.
with connection:
with connection.cursor(cursor_factory=Psycopg2Cursor) as cursor:
cursor.execute(statement)
connection.commit()
# Get response
try:
response = cursor.fetchall()
if not response:
# Empty response list
log('<No Response>', logger_name=_LOGGER_NAME)
return None
except ProgrammingError as e:
if e.args and e.args[0] == 'no results to fetch':
# No response available (i.e. no response given)
log('<No Response>', logger_name=_LOGGER_NAME)
return None
# Some other programming error; re-raise
raise e
log('Response', logger_name=_LOGGER_NAME)
log('--------', logger_name=_LOGGER_NAME)
for line in response:
log(str(line), logger_name=_LOGGER_NAME)
return response
|
https://github.com/shawalli/psycopg2-pgevents/blob/bf04c05839a27c56834b26748d227c71cd87257c/psycopg2_pgevents/sql.py#L37-L89
|
postgresql connection
|
python
|
def connect(self):
"""connect to the various databases, the credientals and settings of which are found in the sherlock settings file
**Return:**
- ``transientsDbConn`` -- the database hosting the transient source data
- ``cataloguesDbConn`` -- connection to the database hosting the contextual catalogues the transients are to be crossmatched against
- ``pmDbConn`` -- connection to the PESSTO Marshall database
See the class docstring for usage
.. todo ::
- update key arguments values and definitions with defaults
- update return values and definitions
- update usage examples and text
- update docstring text
- check sublime snippet exists
- clip any useful text to docs mindmap
- regenerate the docs and check rendering of this docstring
"""
self.log.debug('starting the ``get`` method')
transientSettings = self.settings[
"database settings"]["transients"]
catalogueSettings = self.settings[
"database settings"]["static catalogues"]
if "pessto marshall" in self.settings[
"database settings"]:
marshallSettings = self.settings[
"database settings"]["pessto marshall"]
else:
marshallSettings = False
dbConns = []
for dbSettings in [transientSettings, catalogueSettings, marshallSettings]:
port = False
if dbSettings and dbSettings["tunnel"]:
port = self._setup_tunnel(
tunnelParameters=dbSettings["tunnel"]
)
if dbSettings:
# SETUP A DATABASE CONNECTION FOR THE STATIC CATALOGUES
host = dbSettings["host"]
user = dbSettings["user"]
passwd = dbSettings["password"]
dbName = dbSettings["db"]
thisConn = ms.connect(
host=host,
user=user,
passwd=passwd,
db=dbName,
port=port,
use_unicode=True,
charset='utf8',
client_flag=ms.constants.CLIENT.MULTI_STATEMENTS,
connect_timeout=3600
)
thisConn.autocommit(True)
dbConns.append(thisConn)
else:
dbConns.append(None)
# CREATE A DICTIONARY OF DATABASES
dbConns = {
"transients": dbConns[0],
"catalogues": dbConns[1],
"marshall": dbConns[2]
}
dbVersions = {}
for k, v in dbConns.iteritems():
if v:
sqlQuery = u"""
SELECT VERSION() as v;
""" % locals()
rows = readquery(
log=self.log,
sqlQuery=sqlQuery,
dbConn=v,
quiet=False
)
version = rows[0]['v']
dbVersions[k] = version
else:
dbVersions[k] = None
self.log.debug('completed the ``get`` method')
return dbConns, dbVersions
|
https://github.com/thespacedoctor/sherlock/blob/2c80fb6fa31b04e7820e6928e3d437a21e692dd3/sherlock/database.py#L83-L171
|
postgresql connection
|
python
|
def _connect(self):
"""Try to connect to the database.
Raises:
:exc:`~ConnectionError`: If the connection to the database
fails.
:exc:`~AuthenticationError`: If there is a OperationFailure due to
Authentication failure after connecting to the database.
:exc:`~ConfigurationError`: If there is a ConfigurationError while
connecting to the database.
"""
try:
# FYI: the connection process might raise a
# `ServerSelectionTimeoutError`, that is a subclass of
# `ConnectionFailure`.
# The presence of ca_cert, certfile, keyfile, crlfile implies the
# use of certificates for TLS connectivity.
if self.ca_cert is None or self.certfile is None or \
self.keyfile is None or self.crlfile is None:
client = pymongo.MongoClient(self.host,
self.port,
replicaset=self.replicaset,
serverselectiontimeoutms=self.connection_timeout,
ssl=self.ssl,
**MONGO_OPTS)
if self.login is not None and self.password is not None:
client[self.dbname].authenticate(self.login, self.password)
else:
logger.info('Connecting to MongoDB over TLS/SSL...')
client = pymongo.MongoClient(self.host,
self.port,
replicaset=self.replicaset,
serverselectiontimeoutms=self.connection_timeout,
ssl=self.ssl,
ssl_ca_certs=self.ca_cert,
ssl_certfile=self.certfile,
ssl_keyfile=self.keyfile,
ssl_pem_passphrase=self.keyfile_passphrase,
ssl_crlfile=self.crlfile,
ssl_cert_reqs=CERT_REQUIRED,
**MONGO_OPTS)
if self.login is not None:
client[self.dbname].authenticate(self.login,
mechanism='MONGODB-X509')
return client
except (pymongo.errors.ConnectionFailure,
pymongo.errors.OperationFailure) as exc:
logger.info('Exception in _connect(): {}'.format(exc))
raise ConnectionError(str(exc)) from exc
except pymongo.errors.ConfigurationError as exc:
raise ConfigurationError from exc
|
https://github.com/bigchaindb/bigchaindb/blob/835fdfcf598918f76139e3b88ee33dd157acaaa7/bigchaindb/backend/localmongodb/connection.py#L77-L130
|
postgresql connection
|
python
|
def connect(self, config):
"""Connect to database with given configuration, which may be a dict or
a path to a pymatgen-db configuration.
"""
if isinstance(config, str):
conn = dbutil.get_database(config_file=config)
elif isinstance(config, dict):
conn = dbutil.get_database(settings=config)
else:
raise ValueError("Configuration, '{}', must be a path to "
"a configuration file or dict".format(config))
return conn
|
https://github.com/materialsproject/pymatgen-db/blob/02e4351c2cea431407644f49193e8bf43ed39b9a/matgendb/builders/core.py#L369-L380
|
postgresql connection
|
python
|
def _get_connection(self, key, host='', port='', user='', password='', database_url='', tags=None, use_cached=True):
"Get and memoize connections to instances"
if key in self.dbs and use_cached:
return self.dbs[key]
try:
connect_kwargs = self._get_connect_kwargs(
host=host, port=port, user=user, password=password, database_url=database_url
)
connection = pg.connect(**connect_kwargs)
connection.set_isolation_level(pg.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
# re-raise the CheckExceptions raised by _get_connect_kwargs()
except CheckException:
raise
except Exception:
redacted_url = self._get_redacted_dsn(host, port, user, database_url)
message = u'Cannot establish connection to {}'.format(redacted_url)
self.service_check(
self.SERVICE_CHECK_NAME,
AgentCheck.CRITICAL,
tags=self._get_service_checks_tags(host, port, database_url, tags),
message=message,
)
raise
self.dbs[key] = connection
return connection
|
https://github.com/DataDog/integrations-core/blob/ebd41c873cf9f97a8c51bf9459bc6a7536af8acd/pgbouncer/datadog_checks/pgbouncer/pgbouncer.py#L159-L188
|
postgresql connection
|
python
|
def connect(self, *args, **kwargs):
"""
Connect to a sqlite database only if no connection exists. Isolation level
for the connection is automatically set to autocommit
"""
self.db = sqlite3.connect(*args, **kwargs)
self.db.isolation_level = None
|
https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L30-L36
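Setting isolation_level to None puts the sqlite3 driver in autocommit mode, so no explicit commit() is needed; a small self-contained illustration:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.isolation_level = None  # autocommit: no implicit BEGIN before statements
conn.execute("CREATE TABLE t (x INTEGER)")
conn.execute("INSERT INTO t VALUES (1)")            # visible immediately
print(conn.execute("SELECT x FROM t").fetchone())   # (1,)
conn.close()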
|
postgresql connection
|
python
|
def present(name,
user=None,
password=None,
auth='password',
encoding='UTF8',
locale=None,
runas=None,
waldir=None,
checksums=False):
'''
Initialize the PostgreSQL data directory
name
The name of the directory to initialize
user
The database superuser name
password
The password to set for the postgres user
auth
The default authentication method for local connections
encoding
The default encoding for new databases
locale
The default locale for new databases
waldir
The transaction log (WAL) directory (default is to keep WAL
inside the data directory)
.. versionadded:: 2019.2.0
checksums
If True, the cluster will be created with data page checksums.
.. note:: Data page checksums are supported since PostgreSQL 9.3.
.. versionadded:: 2019.2.0
runas
The system user the operation should be performed on behalf of
'''
_cmt = 'Postgres data directory {0} is already present'.format(name)
ret = {
'name': name,
'changes': {},
'result': True,
'comment': _cmt}
if not __salt__['postgres.datadir_exists'](name=name):
if __opts__['test']:
ret['result'] = None
_cmt = 'Postgres data directory {0} is set to be initialized'\
.format(name)
ret['comment'] = _cmt
return ret
kwargs = dict(
user=user,
password=password,
auth=auth,
encoding=encoding,
locale=locale,
waldir=waldir,
checksums=checksums,
runas=runas)
if __salt__['postgres.datadir_init'](name, **kwargs):
_cmt = 'Postgres data directory {0} has been initialized'\
.format(name)
ret['comment'] = _cmt
ret['changes'][name] = 'Present'
else:
_cmt = 'Postgres data directory {0} initialization failed'\
.format(name)
ret['result'] = False
ret['comment'] = _cmt
return ret
|
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/states/postgres_initdb.py#L39-L121
|
postgresql connection
|
python
|
def connect_to_database_odbc_mysql(self,
database: str,
user: str,
password: str,
server: str = "localhost",
port: int = 3306,
driver: str = "{MySQL ODBC 5.1 Driver}",
autocommit: bool = True) -> None:
"""Connects to a MySQL database via ODBC."""
self.connect(engine=ENGINE_MYSQL, interface=INTERFACE_ODBC,
database=database, user=user, password=password,
host=server, port=port, driver=driver,
autocommit=autocommit)
|
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/rnc_db.py#L1969-L1981
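The wrapper above dispatches to an ODBC interface internally; a rough equivalent using the pyodbc package directly might look like this (driver name, database and credentials are placeholders, not values from the library):

import pyodbc

conn_str = (
    "DRIVER={MySQL ODBC 5.1 Driver};"
    "SERVER=localhost;PORT=3306;"
    "DATABASE=mydb;UID=myuser;PWD=secret;"
)
conn = pyodbc.connect(conn_str, autocommit=True)
cursor = conn.cursor()
cursor.execute("SELECT VERSION()")
print(cursor.fetchone())
conn.close()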
|
postgresql connection
|
python
|
def CheckMySQLConnection(db_options):
"""Checks whether a connection can be established to MySQL.
Args:
db_options: A dict mapping GRR MySQL config options to their values.
Returns:
A boolean indicating whether a connection could be made to a MySQL server
instance with the given options.
"""
for tries_left in range(_MYSQL_MAX_RETRIES, -1, -1):
try:
connection_options = dict(
host=db_options["Mysql.host"],
port=db_options["Mysql.port"],
db=db_options["Mysql.database_name"],
user=db_options["Mysql.database_username"],
passwd=db_options["Mysql.database_password"],
charset="utf8")
ssl_enabled = "Mysql.client_key_path" in db_options
if ssl_enabled:
connection_options["ssl"] = {
"key": db_options["Mysql.client_key_path"],
"cert": db_options["Mysql.client_cert_path"],
"ca": db_options["Mysql.ca_cert_path"],
}
connection = MySQLdb.connect(**connection_options)
if ssl_enabled:
cursor = connection.cursor()
cursor.execute("SHOW VARIABLES LIKE 'have_ssl'")
res = cursor.fetchone()
if res[0] == "have_ssl" and res[1] == "YES":
print("SSL enabled successfully.")
else:
print("Unable to establish SSL connection to MySQL.")
return False
return True
except MySQLdb.OperationalError as mysql_op_error:
if len(mysql_op_error.args) < 2:
# We expect the exception's arguments to be an error-code and
# an error message.
print("Unexpected exception type received from MySQL. %d attempts "
"left: %s" % (tries_left, mysql_op_error))
time.sleep(_MYSQL_RETRY_WAIT_SECS)
continue
if mysql_op_error.args[0] == mysql_conn_errors.CONNECTION_ERROR:
print("Failed to connect to MySQL. Is it running? %d attempts left." %
tries_left)
elif mysql_op_error.args[0] == mysql_conn_errors.UNKNOWN_HOST:
print("Unknown-hostname error encountered while trying to connect to "
"MySQL.")
return False # No need for retry.
elif mysql_op_error.args[0] == general_mysql_errors.BAD_DB_ERROR:
# GRR db doesn't exist yet. That's expected if this is the initial
# setup.
return True
elif mysql_op_error.args[0] in (
general_mysql_errors.ACCESS_DENIED_ERROR,
general_mysql_errors.DBACCESS_DENIED_ERROR):
print("Permission error encountered while trying to connect to "
"MySQL: %s" % mysql_op_error)
return False # No need for retry.
else:
print("Unexpected operational error encountered while trying to "
"connect to MySQL. %d attempts left: %s" %
(tries_left, mysql_op_error))
except MySQLdb.Error as mysql_error:
print("Unexpected error encountered while trying to connect to MySQL. "
"%d attempts left: %s" % (tries_left, mysql_error))
time.sleep(_MYSQL_RETRY_WAIT_SECS)
return False
|
https://github.com/google/grr/blob/5cef4e8e2f0d5df43ea4877e9c798e0bf60bfe74/grr/server/grr_response_server/bin/config_updater_util.py#L175-L249
|
postgresql connection
|
python
|
def connect(self, fn):
"""SQLite connect method initialize db"""
self.conn = sqlite3.connect(fn)
cur = self.get_cursor()
cur.execute('PRAGMA page_size=4096')
cur.execute('PRAGMA FOREIGN_KEYS=ON')
cur.execute('PRAGMA cache_size=10000')
cur.execute('PRAGMA journal_mode=MEMORY')
|
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/lookups/sqlite/base.py#L384-L391
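The same tuning PRAGMAs can be issued with the standard-library sqlite3 module; a minimal sketch with a placeholder file name, values copied from the snippet above:

import sqlite3

conn = sqlite3.connect("lookup.sqlite")
cur = conn.cursor()
# Larger pages and cache speed up bulk lookups; an in-memory journal trades
# durability for speed; foreign keys are off by default in SQLite.
cur.execute("PRAGMA page_size=4096")
cur.execute("PRAGMA foreign_keys=ON")
cur.execute("PRAGMA cache_size=10000")
cur.execute("PRAGMA journal_mode=MEMORY")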
|
postgresql connection
|
python
|
def ready(self):
"""
Assumes postgres now talks to pg_ctl, but might not yet be listening
for connections from psql. Test that psql is able to connect, as
it occasionally takes 5-10 seconds for postgresql to start listening.
"""
cmd = self._psql_cmd()
for i in range(50, -1, -1):
res = subprocess.call(
cmd, stdin=DEV_NULL, stdout=DEV_NULL,
stderr=DEV_NULL)
if res == 0:
break
time.sleep(0.2)
return i != 0
|
https://github.com/jaraco/jaraco.postgres/blob/57375043314a3ce821ac3b0372ba2465135daa95/jaraco/postgres/__init__.py#L467-L481
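PostgreSQL also ships a pg_isready binary that performs the same readiness check without building a psql command line; a hedged sketch of the polling loop around it (host and port are placeholders):

import subprocess
import time

def wait_until_ready(host="localhost", port=5432, attempts=50, delay=0.2):
    # pg_isready exits 0 once the server accepts connections.
    for _ in range(attempts):
        res = subprocess.call(
            ["pg_isready", "-q", "-h", host, "-p", str(port)])
        if res == 0:
            return True
        time.sleep(delay)
    return False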
|
postgresql connection
|
python
|
def parse_connection_string(value):
"""Original Governor stores connection strings for each cluster members if a following format:
postgres://{username}:{password}@{connect_address}/postgres
Since each of our patroni instances provides own REST API endpoint it's good to store this information
in DCS among with postgresql connection string. In order to not introduce new keys and be compatible with
original Governor we decided to extend original connection string in a following way:
postgres://{username}:{password}@{connect_address}/postgres?application_name={api_url}
This way original Governor could use such connection string as it is, because of feature of `libpq` library.
This method is able to split connection string stored in DCS into two parts, `conn_url` and `api_url`"""
scheme, netloc, path, params, query, fragment = urlparse(value)
conn_url = urlunparse((scheme, netloc, path, params, '', fragment))
api_url = ([v for n, v in parse_qsl(query) if n == 'application_name'] or [None])[0]
return conn_url, api_url
|
https://github.com/zalando/patroni/blob/f6d29081c90af52064b981cdd877a07338d86038/patroni/dcs/__init__.py#L42-L56
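A quick round-trip of the extended connection string described in the docstring, using only the standard library (the URLs below are made-up values):

from urllib.parse import urlparse, urlunparse, parse_qsl

value = ("postgres://admin:secret@10.0.0.1:5432/postgres"
         "?application_name=http://10.0.0.1:8008/patroni")

scheme, netloc, path, params, query, fragment = urlparse(value)
conn_url = urlunparse((scheme, netloc, path, params, '', fragment))
api_url = ([v for n, v in parse_qsl(query) if n == 'application_name'] or [None])[0]

print(conn_url)  # postgres://admin:secret@10.0.0.1:5432/postgres
print(api_url)   # http://10.0.0.1:8008/patroni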
|
postgresql connection
|
python
|
def connect(self, database_path, mode="a"):
"""
Connect to a SQLite database.
:param str database_path:
Path to the SQLite database file to be connected.
:param str mode:
``"r"``: Open for read only.
``"w"``: Open for read/write.
Delete existing tables when connecting.
``"a"``: Open for read/write. Append to the existing tables.
:raises ValueError:
If ``database_path`` is invalid or |attr_mode| is invalid.
:raises simplesqlite.DatabaseError:
If the file is encrypted or is not a database.
:raises simplesqlite.OperationalError:
If unable to open the database file.
"""
self.close()
logger.debug("connect to a SQLite database: path='{}', mode={}".format(database_path, mode))
if mode == "r":
self.__verify_db_file_existence(database_path)
elif mode in ["w", "a"]:
self.__validate_db_path(database_path)
else:
raise ValueError("unknown connection mode: " + mode)
if database_path == MEMORY_DB_NAME:
self.__database_path = database_path
else:
self.__database_path = os.path.realpath(database_path)
try:
self.__connection = sqlite3.connect(database_path)
except sqlite3.OperationalError as e:
raise OperationalError(e)
self.__mode = mode
try:
# validate connection after connect
self.fetch_table_names()
except sqlite3.DatabaseError as e:
raise DatabaseError(e)
if mode != "w":
return
for table in self.fetch_table_names():
self.drop_table(table)
|
https://github.com/thombashi/SimpleSQLite/blob/b16f212132b9b98773e68bf7395abc2f60f56fe5/simplesqlite/core.py#L216-L268
|
postgresql connection
|
python
|
def database(
state, host, name,
present=True, owner=None,
template=None, encoding=None,
lc_collate=None, lc_ctype=None, tablespace=None,
connection_limit=None,
# Details for speaking to PostgreSQL via `psql` CLI
postgresql_user=None, postgresql_password=None,
postgresql_host=None, postgresql_port=None,
):
'''
Add/remove PostgreSQL databases.
+ name: name of the database
+ present: whether the database should exist or not
+ owner: the PostgreSQL role that owns the database
+ template: name of the PostgreSQL template to use
+ encoding: encoding of the database
+ lc_collate: lc_collate of the database
+ lc_ctype: lc_ctype of the database
+ tablespace: the tablespace to use for the template
+ connection_limit: the connection limit to apply to the database
+ postgresql_*: global module arguments, see above
Updates:
pyinfra will not attempt to change existing databases - it will either
create or drop databases, but not alter them (if the db exists this
operation will make no changes).
'''
current_databases = host.fact.postgresql_databases(
postgresql_user, postgresql_password,
postgresql_host, postgresql_port,
)
is_present = name in current_databases
if not present:
if is_present:
yield make_execute_psql_command(
'DROP DATABASE {0}'.format(name),
user=postgresql_user,
password=postgresql_password,
host=postgresql_host,
port=postgresql_port,
)
return
# We want the database but it doesn't exist
if present and not is_present:
sql_bits = ['CREATE DATABASE {0}'.format(name)]
for key, value in (
('OWNER', owner),
('TEMPLATE', template),
('ENCODING', encoding),
('LC_COLLATE', lc_collate),
('LC_CTYPE', lc_ctype),
('TABLESPACE', tablespace),
('CONNECTION LIMIT', connection_limit),
):
if value:
sql_bits.append('{0} {1}'.format(key, value))
yield make_execute_psql_command(
' '.join(sql_bits),
user=postgresql_user,
password=postgresql_password,
host=postgresql_host,
port=postgresql_port,
)
|
https://github.com/Fizzadar/pyinfra/blob/006f751f7db2e07d32522c0285160783de2feb79/pyinfra/modules/postgresql.py#L127-L197
|
postgresql connection
|
python
|
def setup(settings):
"""
Setup the database connection.
"""
connector = settings.get('db_connector')
if connector == 'postgres':
from playhouse.pool import PooledPostgresqlExtDatabase
return PooledPostgresqlExtDatabase(settings['db_name'],
user=settings['db_user'],
password=settings['db_password'],
host=settings['db_host'],
port=settings.get('db_port'),
max_connections=settings.get('db_max_conn'),
stale_timeout=settings.get('db_stale_timeout'),
timeout=settings.get('db_timeout'),
register_hstore=False)
|
https://github.com/baguette-io/baguette-messaging/blob/8d1c4707ea7eace8617fed2d97df2fcc9d0cdee1/farine/connectors/sql/__init__.py#L31-L46
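A hedged usage sketch of the setup() helper above with a hand-written settings dict (database name, credentials and pool numbers are placeholders, not the project's defaults):

settings = {
    'db_connector': 'postgres',
    'db_name': 'appdb',
    'db_user': 'app',
    'db_password': 'secret',
    'db_host': 'localhost',
    'db_port': 5432,
    'db_max_conn': 8,
    'db_stale_timeout': 300,  # seconds before an idle pooled connection is recycled
    'db_timeout': 5,
}

database = setup(settings)
database.connect()
print(database.execute_sql("SELECT 1").fetchone())
database.close()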
|
postgresql connection
|
python
|
def connection(filepath=None, section='oep'):
"""
Instantiate a database connection (for the use with SQLAlchemy).
The keyword argument `filepath` specifies the location of the config file
that contains database connection information. If not given, the default
of `~/.egoio/config.ini` applies.
Parameters
----------
filepath : str
Absolute path of config file including the filename itself
Returns
-------
conn : sqlalchemy.engine
SQLalchemy engine object containing the connection details
"""
# define default filepath if not provided
if filepath is None:
filepath = os.path.join(os.path.expanduser("~"), '.egoio', 'config.ini')
# does the file exist?
if not os.path.isfile(filepath):
print('DB config file {file} not found. '
'This might be the first run of the tool. '
.format(file=filepath))
cfg = create_oedb_config_file(filepath, section=section)
else:
cfg = readcfg(filepath, section)
try:
pw = cfg.get(section, "password")
except:
pw = keyring.get_password(section,
cfg.get(section, "username"))
if pw is None:
pw = getpass.getpass(prompt='No password found for database "{db}". '
'Enter your password to '
'store it in keyring: '
.format(db=cfg.get(section, 'database')))
keyring.set_password(section, cfg.get(section, "username"), pw)
# establish connection and return it
conn = create_engine(
"postgresql+{dialect}://{user}:{password}@{host}:{port}/{db}".format(
dialect=cfg.get(section, 'dialect', fallback='psycopg2'),
user=cfg.get(section, 'username'),
password=pw,
host=cfg.get(section, 'host'),
port=cfg.get(section, 'port'),
db=cfg.get(section, 'database')))
return conn
|
https://github.com/openego/ego.io/blob/35c472914ee62eff37ddb8e69be3d83276cf2d42/egoio/tools/db.py#L185-L239
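Once the engine is returned, the connection details can be verified with a trivial query; a minimal sketch assuming SQLAlchemy 1.4+ and placeholder credentials:

from sqlalchemy import create_engine, text

engine = create_engine(
    "postgresql+psycopg2://reader:secret@localhost:5432/oedb")

with engine.connect() as conn:
    # A cheap round-trip to confirm the configured credentials actually work.
    print(conn.execute(text("SELECT version()")).scalar())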
|
postgresql connection
|
python
|
def _poll_connection(self, fd):
"""Check with psycopg2 to see what action to take. If the state is
POLL_OK, we should have a pending callback for that fd.
:param int fd: The socket fd for the postgresql connection
"""
try:
state = self._connections[fd].poll()
except (OSError, socket.error) as error:
self._ioloop.remove_handler(fd)
if fd in self._futures and not self._futures[fd].done():
self._futures[fd].set_exception(
psycopg2.OperationalError('Connection error (%s)' % error)
)
except (psycopg2.Error, psycopg2.Warning) as error:
if fd in self._futures and not self._futures[fd].done():
self._futures[fd].set_exception(error)
else:
if state == extensions.POLL_OK:
if fd in self._futures and not self._futures[fd].done():
self._futures[fd].set_result(True)
elif state == extensions.POLL_WRITE:
self._ioloop.update_handler(fd, ioloop.IOLoop.WRITE)
elif state == extensions.POLL_READ:
self._ioloop.update_handler(fd, ioloop.IOLoop.READ)
elif state == extensions.POLL_ERROR:
self._ioloop.remove_handler(fd)
if fd in self._futures and not self._futures[fd].done():
self._futures[fd].set_exception(
psycopg2.Error('Poll Error'))
|
https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/tornado_session.py#L480-L510
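Outside Tornado, the same poll() protocol can be driven synchronously with select(); a minimal sketch following psycopg2's documented asynchronous support, assuming psycopg2 >= 2.7 (which accepts the async_ keyword) and a placeholder DSN:

import select
import psycopg2
from psycopg2 import extensions

def wait(conn):
    # Loop until poll() reports the pending operation has finished,
    # blocking on the socket in whichever direction psycopg2 asks for.
    while True:
        state = conn.poll()
        if state == extensions.POLL_OK:
            return
        elif state == extensions.POLL_WRITE:
            select.select([], [conn.fileno()], [])
        elif state == extensions.POLL_READ:
            select.select([conn.fileno()], [], [])
        else:
            raise psycopg2.OperationalError("poll() returned %s" % state)

conn = psycopg2.connect("dbname=test user=postgres", async_=1)
wait(conn)               # wait for the connection to be established
cur = conn.cursor()
cur.execute("SELECT 42")
wait(cur.connection)     # wait for the query to finish
print(cur.fetchall())    # [(42,)]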
|
postgresql connection
|
python
|
def connect(*args, **kwargs):
"""Create database connection, use TraceCursor as the cursor_factory."""
kwargs['cursor_factory'] = TraceCursor
conn = pg_connect(*args, **kwargs)
return conn
|
https://github.com/census-instrumentation/opencensus-python/blob/992b223f7e34c5dcb65922b7d5c827e7a1351e7d/contrib/opencensus-ext-postgresql/opencensus/ext/postgresql/trace.py#L42-L46
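The cursor_factory hook used above is the same one psycopg2 exposes for its own cursor subclasses; a small sketch with the built-in RealDictCursor (the DSN is a placeholder):

import psycopg2
import psycopg2.extras

conn = psycopg2.connect(
    "dbname=test user=postgres",
    cursor_factory=psycopg2.extras.RealDictCursor)

with conn.cursor() as cur:
    cur.execute("SELECT 1 AS answer")
    print(cur.fetchone())  # {'answer': 1}

conn.close()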
|
postgresql connection
|
python
|
def ping(self) -> None:
"""Pings a database connection, reconnecting if necessary."""
if self.db is None or self.db_pythonlib not in [PYTHONLIB_MYSQLDB,
PYTHONLIB_PYMYSQL]:
return
try:
self.db.ping(True) # test connection; reconnect upon failure
# ... should auto-reconnect; however, it seems to fail the first
# time, then work the next time.
# Exception (the first time) is:
# <class '_mysql_exceptions.OperationalError'>:
# (2006, 'MySQL server has gone away')
# http://mail.python.org/pipermail/python-list/2008-February/
# 474598.html
except mysql.OperationalError: # loss of connection
self.db = None
self.connect_to_database_mysql(
self._database, self._user, self._password, self._server,
self._port, self._charset, self._use_unicode)
|
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/rnc_db.py#L1931-L1949
|
postgresql connection
|
python
|
def reconnect(self, query = None, log_reconnect = False):
"""
Reconnect to the database.
"""
uri = list(urisup.uri_help_split(self.sqluri))
if uri[1]:
authority = list(uri[1])
if authority[1]:
authority[1] = None
uri[1] = authority
if log_reconnect:
LOG.warning('reconnecting to %r database (query: %r)', urisup.uri_help_unsplit(uri), query)
self.__connect()
|
https://github.com/decryptus/sonicprobe/blob/72f73f3a40d2982d79ad68686e36aa31d94b76f8/sonicprobe/libs/anysql.py#L374-L387
|
postgresql connection
|
python
|
def database(self, name=None):
"""Connect to a database called `name`.
Parameters
----------
name : str, optional
The name of the database to connect to. If ``None``, return
the database named ``self.current_database``.
Returns
-------
db : MySQLDatabase
An :class:`ibis.sql.mysql.client.MySQLDatabase` instance.
Notes
-----
This creates a new connection if `name` is both not ``None`` and not
equal to the current database.
"""
if name == self.current_database or (
name is None and name != self.current_database
):
return self.database_class(self.current_database, self)
else:
url = self.con.url
client_class = type(self)
new_client = client_class(
host=url.host,
user=url.username,
port=url.port,
password=url.password,
database=name,
)
return self.database_class(name, new_client)
|
https://github.com/ibis-project/ibis/blob/1e39a5fd9ef088b45c155e8a5f541767ee8ef2e7/ibis/sql/mysql/client.py#L108-L141
|
postgresql connection
|
python
|
def sync_local_to_remote(force="no"):
"""
Sync your local postgres database with remote
Example:
fabrik prod sync_local_to_remote:force=yes
"""
_check_requirements()
if force != "yes":
message = "This will replace the remote database '%s' with your "\
"local '%s', are you sure [y/n]" % (env.psql_db, env.local_psql_db)
answer = prompt(message, "y")
if answer != "y":
logger.info("Sync stopped")
return
init_tasks() # Bootstrap fabrik
# Create database dump
local_file = "sync_%s.sql.tar.gz" % int(time.time()*1000)
local_path = "/tmp/%s" % local_file
with context_managers.shell_env(PGPASSWORD=env.local_psql_password):
elocal("pg_dump -h localhost -Fc -f %s -U %s %s -x -O" % (
local_path, env.local_psql_user, env.local_psql_db
))
remote_path = "/tmp/%s" % local_file
# Upload sync file
put(remote_path, local_path)
# Import sync file by performing the following task (drop, create, import)
with context_managers.shell_env(PGPASSWORD=env.psql_password):
env.run("pg_restore --clean -h localhost -d %s -U %s '%s'" % (
env.psql_db,
env.psql_user,
remote_path)
)
# Cleanup
env.run("rm %s" % remote_path)
elocal("rm %s" % local_path)
# Trigger hook
run_hook("postgres.after_sync_local_to_remote")
logger.info("Sync complete")
|
https://github.com/Frojd/Fabrik/blob/9f2edbba97a7fd236b72a9b3010f6e912ab5c001/fabrik/ext/postgres.py#L112-L162
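The underlying shell commands are plain pg_dump/pg_restore; a local-only sketch of the same dump-and-restore round trip via subprocess (database names, user and password are placeholders):

import os
import subprocess
import time

dump_path = "/tmp/sync_%d.sql.tar.gz" % int(time.time() * 1000)
env = dict(os.environ, PGPASSWORD="secret")

# Custom-format dump (-Fc) without privileges (-x) or ownership (-O).
subprocess.check_call(
    ["pg_dump", "-h", "localhost", "-Fc", "-f", dump_path,
     "-U", "app", "appdb", "-x", "-O"], env=env)

# Restore into the target database, dropping existing objects first (--clean).
subprocess.check_call(
    ["pg_restore", "--clean", "-h", "localhost",
     "-d", "appdb_copy", "-U", "app", dump_path], env=env)

os.remove(dump_path)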
|
postgresql connection
|
python
|
def connection_from_ndb_query(query, args=None, connection_type=None, edge_type=None, pageinfo_type=None,
transform_edges=None, context=None, **kwargs):
'''
A simple function that accepts an ndb Query and uses the ndb QueryIterator object (https://cloud.google.com/appengine/docs/python/ndb/queries#iterators)
to return a connection object for use in GraphQL.
It uses array offsets as pagination,
so pagination will only work if the array is static.
'''
args = args or {}
connection_type = connection_type or Connection
edge_type = edge_type or Edge
pageinfo_type = pageinfo_type or PageInfo
full_args = dict(args, **kwargs)
first = full_args.get('first')
after = full_args.get('after')
has_previous_page = bool(after)
keys_only = full_args.get('keys_only', False)
batch_size = full_args.get('batch_size', 20)
page_size = first if first else full_args.get('page_size', 20)
start_cursor = ndb.Cursor(urlsafe=after) if after else None
ndb_iter = query.iter(produce_cursors=True, start_cursor=start_cursor, batch_size=batch_size, keys_only=keys_only, projection=query.projection)
edges = []
while len(edges) < page_size:
missing_edges_count = page_size - len(edges)
edges_page = generate_edges_page(ndb_iter, missing_edges_count, keys_only, edge_type)
edges.extend(transform_edges(edges_page, args, context) if transform_edges else edges_page)
if len(edges_page) < missing_edges_count:
break
try:
end_cursor = ndb_iter.cursor_after().urlsafe()
except BadArgumentError:
end_cursor = None
# Construct the connection
return connection_type(
edges=edges,
page_info=pageinfo_type(
start_cursor=start_cursor.urlsafe() if start_cursor else '',
end_cursor=end_cursor,
has_previous_page=has_previous_page,
has_next_page=ndb_iter.has_next()
)
)
|
https://github.com/graphql-python/graphene-gae/blob/a223d10b7558c7e8e6d190cd1297eba54878c4c8/graphene_gae/ndb/fields.py#L47-L95
|
postgresql connection
|
python
|
def get_connection(**kwargs):
"""
Connects to the db using pymapd
https://pymapd.readthedocs.io/en/latest/usage.html#connecting
Kwargs:
db_user(str): DB username
db_passwd(str): DB password
db_server(str): DB host
db_port(int): DB port
db_name(str): DB name
Returns:
con(class): Connection class
False(bool): The connection failed. Exception should be logged.
"""
try:
logging.debug("Connecting to mapd db...")
con = pymapd.connect(
user=kwargs["db_user"],
password=kwargs["db_passwd"],
host=kwargs["db_server"],
port=kwargs["db_port"],
dbname=kwargs["db_name"],
)
logging.info("Succesfully connected to mapd db")
return con
except (pymapd.exceptions.OperationalError, pymapd.exceptions.Error):
logging.exception("Error connecting to database.")
return False
|
https://github.com/omnisci/mapd-core/blob/83e87035696968748a61ff561c19631d55a3b902/Benchmarks/run-benchmark.py#L16-L45
|
postgresql connection
|
python
|
def is_disconnect(e, connection, cursor):
"""
Connection state check from SQLAlchemy:
https://bitbucket.org/sqlalchemy/sqlalchemy/src/tip/lib/sqlalchemy/dialects/postgresql/psycopg2.py
"""
if isinstance(e, OperationalError):
# these error messages from libpq: interfaces/libpq/fe-misc.c.
# TODO: these are sent through gettext in libpq and we can't
# check within other locales - consider using connection.closed
return 'terminating connection' in str(e) or \
'closed the connection' in str(e) or \
'connection not open' in str(e) or \
'could not receive data from server' in str(e)
elif isinstance(e, InterfaceError):
# psycopg2 client errors, psycopg2/connection.h, psycopg2/cursor.h
return 'connection already closed' in str(e) or \
'cursor already closed' in str(e)
elif isinstance(e, ProgrammingError):
# not sure where this path is originally from, it may
# be obsolete. It really says "losed", not "closed".
return "closed the connection unexpectedly" in str(e)
else:
return False
|
https://github.com/heroku-python/django-postgrespool/blob/ce83a4d49c19eded86d86d5fcfa8daaeea5ef662/django_postgrespool/base.py#L42-L64
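A quick check of the classification logic with hand-built exceptions (purely illustrative; assumes psycopg2 is importable and is_disconnect is defined as above — the connection and cursor arguments are unused, so None is passed):

from psycopg2 import InterfaceError, OperationalError

print(is_disconnect(
    OperationalError("server closed the connection unexpectedly"),
    None, None))                                                  # True
print(is_disconnect(
    InterfaceError("connection already closed"), None, None))    # True
print(is_disconnect(ValueError("syntax error"), None, None))     # False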
|
postgresql connection
|
python
|
def as_sql(self, qn, connection):
"""
Create the proper SQL fragment. This inserts something like
"(T0.flags & value) != 0".
This will be called by Where.as_sql()
"""
engine = connection.settings_dict['ENGINE'].rsplit('.', -1)[-1]
if engine.startswith('postgres'):
XOR_OPERATOR = '#'
elif engine.startswith('sqlite'):
raise NotImplementedError
else:
XOR_OPERATOR = '^'
if self.bit:
return ("%s.%s | %d" % (qn(self.table_alias), qn(self.column), self.bit.mask),
[])
return ("%s.%s %s %d" % (qn(self.table_alias), qn(self.column), XOR_OPERATOR, self.bit.mask),
[])
|
https://github.com/disqus/django-bitfield/blob/a6502aa1cb810620f801e282dc5a7330064fbbf5/bitfield/query.py#L53-L72
|
postgresql connection
|
python
|
def get_connection(self):
"""Get a connection to this Database. Connections are retrieved from a
pool.
"""
if not self.open:
raise exc.ResourceClosedError('Database closed.')
return Connection(self._engine.connect())
|
https://github.com/kennethreitz/records/blob/ecd857266c5e7830d657cbe0196816314790563b/records.py#L285-L292
|
postgresql connection
|
python
|
def _MakeConnection(self, database=""):
"""Repeat connection attempts to server until we get a valid connection."""
first_attempt_time = time.time()
wait_time = config.CONFIG["Mysql.max_connect_wait"]
while wait_time == 0 or time.time() - first_attempt_time < wait_time:
try:
connection_args = dict(
user=config.CONFIG["Mysql.database_username"],
db=database,
charset="utf8",
passwd=config.CONFIG["Mysql.database_password"],
autocommit=True,
cursorclass=cursors.DictCursor,
host=config.CONFIG["Mysql.host"],
port=config.CONFIG["Mysql.port"])
key_path = config.CONFIG["Mysql.client_key_path"]
if key_path:
cert_path = config.CONFIG["Mysql.client_cert_path"]
ca_cert_path = config.CONFIG["Mysql.ca_cert_path"]
logging.debug("Client key file configured, trying to use SSL.")
connection_args["ssl"] = {
"key": key_path,
"cert": cert_path,
"ca": ca_cert_path,
}
return MySQLdb.connect(**connection_args)
except MySQLdb.OperationalError as e:
# This is a fatal error, we just raise the top level exception here.
if "Access denied" in str(e):
raise Error(str(e))
if "Can't connect" in str(e):
logging.warning("Datastore connection retrying after failed with %s.",
str(e))
time.sleep(.5)
continue
raise
raise IOError("Unable to connect to Mysql database.")
|
https://github.com/google/grr/blob/5cef4e8e2f0d5df43ea4877e9c798e0bf60bfe74/grr/server/grr_response_server/data_stores/mysql_advanced_data_store.py#L87-L129
|
postgresql connection
|
python
|
def commit(self):
'''
This just calls the connection's commit method.
'''
if not self.connection.closed:
self.connection.commit()
else:
raise AttributeError('postgres connection to %s is closed' %
self.database)
|
https://github.com/waqasbhatti/astrobase/blob/2922a14619d183fb28005fa7d02027ac436f2265/astrobase/lcdb.py#L362-L371
|