pydal.adapters package

Submodules

pydal.adapters.base module

class pydal.adapters.base.BaseAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None, entity_quoting=False)[source]

Bases: pydal.connection.ConnectionPool

adapt(value)[source]
close_connection(**kwargs)
common_filter(query, tablist)[source]
connector()[source]
dbengine = 'None'
drivers = ()
drop_table(table, mode='')[source]
expand_all(fields, tabledict)[source]
find_driver()[source]
get_table(*queries)[source]
iterparse(sql, fields, colnames, blob_decode=True, cacheable=False)[source]

Iterator to parse one row at a time. It doesn’t support the old style virtual fields

parse(rows, fields, colnames, blob_decode=True, cacheable=False)[source]
parse_value(value, field_itype, field_type, blob_decode=True)[source]
represent(obj, field_type)[source]
rowslice(rows, minimum=0, maximum=None)[source]
sqlsafe_field(fieldname)[source]
sqlsafe_table(tablename, original_tablename=None)[source]
support_distributed_transaction = False
tables(*queries)[source]
test_connection()[source]
types
uploads_in_blob = False
class pydal.adapters.base.DebugHandler(adapter)[source]

Bases: pydal.helpers.classes.ExecutionHandler

before_execute(command)[source]
class pydal.adapters.base.NoSQLAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None, entity_quoting=False)[source]

Bases: pydal.adapters.base.BaseAdapter

can_select_for_update = False
commit()[source]
commit_prepared(key)[source]
create_table(table, migrate=True, fake_migrate=False, polymodel=None)[source]
drop(**kwargs)
drop_table(table, mode='')[source]
id_query(table)[source]
nested_select(*args, **kwargs)[source]
prepare()[source]
rollback()[source]
rollback_prepared(key)[source]
class pydal.adapters.base.NullAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None, entity_quoting=False)[source]

Bases: pydal.adapters.base.BaseAdapter

connector()[source]
find_driver()[source]
class pydal.adapters.base.SQLAdapter(*args, **kwargs)[source]

Bases: pydal.adapters.base.BaseAdapter

adapt(obj)[source]
bulk_insert(table, items)[source]
can_select_for_update = True
commit(**kwargs)
commit_on_alter_table = False
commit_prepared(**kwargs)
count(query, distinct=None)[source]
create_index(table, index_name, *fields, **kwargs)[source]
create_sequence_and_triggers(query, table, **args)[source]
create_table(*args, **kwargs)[source]
delete(table, query)[source]
distributed_transaction_begin(key)[source]
drop(**kwargs)
drop_index(table, index_name)[source]
drop_table(table, mode='')[source]
execute(**kwargs)
execution_handlers = []
fetchall()[source]
fetchone()[source]
filter_sql_command(command)[source]
id_query(table)[source]
index_expander(**kwds)[source]
insert(table, fields)[source]
iterselect(query, fields, attributes)[source]
lastrowid(table)[source]
migrator_cls

alias of pydal.migrator.Migrator

nested_select(query, fields, attributes)[source]
prepare(**kwargs)
represent(obj, field_type)[source]
rollback(**kwargs)
rollback_prepared(**kwargs)
select(query, fields, attributes)[source]
smart_adapt(obj)[source]
sqlsafe_field(fieldname)[source]
sqlsafe_table(tablename, original_tablename=None)[source]
table_alias(tbl, current_scope=[])[source]
test_connection()[source]
truncate(table, mode='')[source]
update(table, query, fields)[source]

pydal.adapters.couchdb module

class pydal.adapters.couchdb.CouchDB(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None, entity_quoting=False)[source]

Bases: pydal.adapters.base.NoSQLAdapter

connector()[source]
count(query, distinct=None)[source]
create_table(table, migrate=True, fake_migrate=False, polymodel=None)[source]
dbengine = 'couchdb'
delete(table, query)[source]
drivers = ('couchdb',)
insert(table, fields)[source]
select(query, fields, attributes)[source]
update(table, query, fields)[source]
uploads_in_blob = True

pydal.adapters.cubrid module

pydal.adapters.db2 module

class pydal.adapters.db2.DB2(*args, **kwargs)[source]

Bases: pydal.adapters.base.SQLAdapter

dbengine = 'db2'
execute(**kwargs)
lastrowid(table)[source]
rowslice(rows, minimum=0, maximum=None)[source]
class pydal.adapters.db2.DB2IBM(*args, **kwargs)[source]

Bases: pydal.adapters.db2.DB2

connector()[source]
drivers = ('ibm_db_dbi',)
class pydal.adapters.db2.DB2Pyodbc(*args, **kwargs)[source]

Bases: pydal.adapters.db2.DB2

connector()[source]
drivers = ('pyodbc',)

pydal.adapters.firebird module

class pydal.adapters.firebird.FireBird(*args, **kwargs)[source]

Bases: pydal.adapters.base.SQLAdapter

REGEX_URI = <_sre.SRE_Pattern object at 0x21ea0c0>
commit_on_alter_table = True
connector()[source]
create_sequence_and_triggers(query, table, **args)[source]
dbengine = 'firebird'
drivers = ('kinterbasdb', 'firebirdsql', 'fdb', 'pyodbc')
lastrowid(table)[source]
support_distributed_transaction = True
class pydal.adapters.firebird.FireBirdEmbedded(*args, **kwargs)[source]

Bases: pydal.adapters.firebird.FireBird

REGEX_URI = <_sre.SRE_Pattern object at 0x21e8ca0>

pydal.adapters.google_adapters module

Adapter for GAE

pydal.adapters.imap module

pydal.adapters.informix module

class pydal.adapters.informix.Informix(*args, **kwargs)[source]

Bases: pydal.helpers.classes.ConnectionConfigurationMixin, pydal.adapters.base.SQLAdapter

connector()[source]
dbengine = 'informix'
drivers = ('informixdb',)
execute(**kwargs)
lastrowid(table)[source]
test_connection()[source]
class pydal.adapters.informix.InformixSE(*args, **kwargs)[source]

Bases: pydal.adapters.informix.Informix

rowslice(rows, minimum=0, maximum=None)[source]

pydal.adapters.ingres module

class pydal.adapters.ingres.Ingres(*args, **kwargs)[source]

Bases: pydal.adapters.base.SQLAdapter

connector()[source]
create_sequence_and_triggers(query, table, **args)[source]
dbengine = 'ingres'
drivers = ('pyodbc',)
class pydal.adapters.ingres.IngresUnicode(*args, **kwargs)[source]

Bases: pydal.adapters.ingres.Ingres

pydal.adapters.mongo module

class pydal.adapters.mongo.Binary[source]

Bases: object

class pydal.adapters.mongo.Expansion(adapter, crud, query, fields=(), tablename=None, groupby=None, distinct=False, having=None)[source]

Bases: object

Class to encapsulate a pydal expression and track the parse expansion and its results.

Two different MongoDB mechanisms are targeted here. If the query is sufficiently simple, then simple queries are generated. The bulk of the complexity here is however to support more complex queries that are targeted to the MongoDB Aggregation Pipeline.

This class supports four operations: ‘count’, ‘select’, ‘update’ and ‘delete’.

Behavior varies somewhat for each operation type. However building each pipeline stage is shared where the behavior is the same (or similar) for the different operations.

In general an attempt is made to build the query without using the pipeline, and if that fails then the query is rebuilt with the pipeline.

QUERY constructed in _build_pipeline_query():
    $project : used to calculate expressions if needed
    $match : filters out records
FIELDS constructed in _expand_fields():
    FIELDS:COUNT
        $group : filter for distinct if needed
        $group : count the records remaining
    FIELDS:SELECT
        $group : implement aggregations if needed
        $project : implement expressions (etc) for select
    FIELDS:UPDATE
        $project : implement expressions (etc) for update
HAVING constructed in _add_having():
    $project : used to calculate expressions
    $match : filters out records
    $project : used to filter out previous expression fields
annotate_expression(expression)[source]
dialect
get_collection(safe=None)[source]
class pydal.adapters.mongo.Mongo(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None, entity_quoting=False)[source]

Bases: pydal.helpers.classes.ConnectionConfigurationMixin, pydal.adapters.base.NoSQLAdapter

bulk_insert(table, items)[source]
check_notnull(table, values)[source]
check_unique(table, values)[source]
connector()[source]
count(query, distinct=None, snapshot=True)[source]
dbengine = 'mongodb'
delete(table, query, safe=None)[source]
drivers = ('pymongo',)
find_driver()[source]
insert(table, fields, safe=None)[source]

Safe determines whether an asynchronous request is done or a synchronous action is done. For safety, we use synchronous requests by default.

object_id(arg=None)[source]

Convert input to a valid MongoDB ObjectId instance

self.object_id(“<random>”) -> ObjectId (not unique) instance

represent(obj, field_type)[source]
select(query, fields, attributes, snapshot=False)[source]
truncate(table, mode, safe=None)[source]
update(table, query, fields, safe=None)[source]
class pydal.adapters.mongo.MongoBlob[source]

Bases: pydal.adapters.mongo.Binary

MONGO_BLOB_BYTES = 0
MONGO_BLOB_NON_UTF8_STR = 1
static decode(value)[source]

pydal.adapters.mssql module

class pydal.adapters.mssql.MSSQL(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.base.SQLAdapter

REGEX_ARGPATTERN = <_sre.SRE_Pattern object>
REGEX_DSN = <_sre.SRE_Pattern object>
REGEX_URI = <_sre.SRE_Pattern object at 0x223c4c0>
connector()[source]
dbengine = 'mssql'
drivers = ('pyodbc',)
lastrowid(table)[source]
class pydal.adapters.mssql.MSSQL1(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQL, pydal.adapters.mssql.Slicer

class pydal.adapters.mssql.MSSQL1N(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQLN, pydal.adapters.mssql.Slicer

class pydal.adapters.mssql.MSSQL3(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQL

class pydal.adapters.mssql.MSSQL3N(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQLN

class pydal.adapters.mssql.MSSQL4(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQL

class pydal.adapters.mssql.MSSQL4N(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQLN

class pydal.adapters.mssql.MSSQLN(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQL

execute(**kwargs)
represent(obj, field_type)[source]
class pydal.adapters.mssql.Slicer[source]

Bases: object

rowslice(rows, minimum=0, maximum=None)[source]
class pydal.adapters.mssql.Sybase(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQL1

connector()[source]
dbengine = 'sybase'
class pydal.adapters.mssql.Vertica(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQL1

lastrowid(table)[source]

pydal.adapters.mysql module

class pydal.adapters.mysql.Cubrid(*args, **kwargs)[source]

Bases: pydal.adapters.mysql.MySQL

dbengine = 'cubrid'
drivers = ('cubriddb',)
class pydal.adapters.mysql.MySQL(*args, **kwargs)[source]

Bases: pydal.adapters.base.SQLAdapter

REGEX_URI = <_sre.SRE_Pattern object at 0x22618b0>
after_connection()[source]
commit_on_alter_table = True
commit_prepared(**kwargs)
connector()[source]
dbengine = 'mysql'
distributed_transaction_begin(key)[source]
drivers = ('MySQLdb', 'pymysql', 'mysqlconnector')
prepare(**kwargs)
rollback_prepared(**kwargs)
support_distributed_transaction = True

pydal.adapters.oracle module

class pydal.adapters.oracle.Oracle(*args, **kwargs)[source]

Bases: pydal.adapters.base.SQLAdapter

after_connection()[source]
cmd_fix = <_sre.SRE_Pattern object>
connector()[source]
create_sequence_and_triggers(query, table, **args)[source]
dbengine = 'oracle'
drivers = ('cx_Oracle',)
execute(**kwargs)
fetchall()[source]
insert(table, fields)[source]
lastrowid(table)[source]
sqlsafe_table(tablename, original_tablename=None)[source]
test_connection()[source]

pydal.adapters.postgres module

class pydal.adapters.postgres.JDBCPostgre(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.postgres.Postgre

REGEX_URI = <_sre.SRE_Pattern object>
after_connection()[source]
connector()[source]
drivers = ('zxJDBC',)
class pydal.adapters.postgres.Postgre(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.helpers.classes.ConnectionConfigurationMixin, pydal.adapters.base.SQLAdapter

REGEX_URI = <_sre.SRE_Pattern object at 0x196c9c0>
after_connection()[source]
commit_prepared(**kwargs)
connector()[source]
dbengine = 'postgres'
drivers = ('psycopg2', 'pg8000')
lastrowid(table)[source]
prepare(**kwargs)
rollback_prepared(**kwargs)
support_distributed_transaction = True
class pydal.adapters.postgres.PostgreBoolean(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.postgres.PostgreNew

class pydal.adapters.postgres.PostgreMeta[source]

Bases: pydal.adapters.AdapterMeta

class pydal.adapters.postgres.PostgreNew(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.postgres.Postgre

class pydal.adapters.postgres.PostgrePG8000(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.postgres.Postgre

adapt(obj)[source]
drivers = ('pg8000',)
execute(**kwargs)
class pydal.adapters.postgres.PostgrePG8000Boolean(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.postgres.PostgrePG8000New, pydal.adapters.postgres.PostgreBoolean

class pydal.adapters.postgres.PostgrePG8000New(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.postgres.PostgrePG8000, pydal.adapters.postgres.PostgreNew

class pydal.adapters.postgres.PostgrePsyco(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.postgres.Postgre

adapt(obj)[source]
drivers = ('psycopg2',)
class pydal.adapters.postgres.PostgrePsycoBoolean(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.postgres.PostgrePsycoNew, pydal.adapters.postgres.PostgreBoolean

class pydal.adapters.postgres.PostgrePsycoNew(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.postgres.PostgrePsyco, pydal.adapters.postgres.PostgreNew

pydal.adapters.sapdb module

pydal.adapters.sqlite module

class pydal.adapters.sqlite.JDBCSQLite(*args, **kwargs)[source]

Bases: pydal.adapters.sqlite.SQLite

after_connection()[source]
connector()[source]
drivers = ('zxJDBC_sqlite',)
class pydal.adapters.sqlite.SQLite(*args, **kwargs)[source]

Bases: pydal.adapters.base.SQLAdapter

after_connection()[source]
connector()[source]
dbengine = 'sqlite'
delete(table, query)[source]
drivers = ('sqlite2', 'sqlite3')
select(query, fields, attributes)[source]
static web2py_extract(lookup, s)[source]
static web2py_regexp(expression, item)[source]
class pydal.adapters.sqlite.Spatialite(*args, **kwargs)[source]

Bases: pydal.adapters.sqlite.SQLite

SPATIALLIBS = {'Darwin': 'libspatialite.dylib', 'Linux': 'libspatialite.so', 'Windows': 'mod_spatialite.dll'}
after_connections()[source]
dbengine = 'spatialite'

pydal.adapters.teradata module

class pydal.adapters.teradata.Teradata(*args, **kwargs)[source]

Bases: pydal.adapters.base.SQLAdapter

close()[source]
connector()[source]
dbengine = ''
drivers = ('pyodbc',)
lastrowid(table)[source]

Module contents

class pydal.adapters.AdapterMeta[source]

Bases: type

Metaclass to support manipulation of adapter classes.

At the moment it is used to intercept the entity_quoting argument passed to DAL.

class pydal.adapters.Adapters(namespace=None)[source]

Bases: pydal.helpers._internals.Dispatcher

get_for(uri)[source]
register_for(*uris)[source]
pydal.adapters.with_connection(f)[source]
pydal.adapters.with_connection_or_raise(f)[source]