pydal.adapters package

Submodules

pydal.adapters.base module

class pydal.adapters.base.AdapterMeta[source]

Bases: type

Metaclass to support manipulation of adapter classes.

At the moment it is used to intercept the entity_quoting argument passed to DAL.
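
For example, entity_quoting can be passed when instantiating DAL; a minimal sketch (the connection URI is illustrative):

from pydal import DAL

# entity_quoting is intercepted by AdapterMeta and controls whether
# table and field names are quoted in the generated SQL
db = DAL('sqlite://storage.db', entity_quoting=False)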

class pydal.adapters.base.BaseAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None)[source]

Bases: pydal.connection.ConnectionPool

ADD(first, second)[source]
AGGREGATE(first, what)[source]
ALLOW_NULL()[source]
AND(first, second)[source]
AS(first, second)[source]
BELONGS(first, second)[source]
CASE(query, t, f)[source]
CAST(first, second)[source]
COALESCE(first, second)[source]
COALESCE_ZERO(first)[source]
COMMA(first, second)[source]
CONCAT(*items)[source]
CONTAINS(first, second, case_sensitive=True)[source]
COUNT(first, distinct=None)[source]
DIV(first, second)[source]
ENDSWITH(first, second)[source]
EPOCH(first)[source]
EQ(first, second=None)[source]
EXPAND_CASE(query, true_false)[source]
EXTRACT(first, what)[source]
FALSE = 'F'
FALSE_exp = '0'
GE(first, second=None)[source]
GT(first, second=None)[source]
ILIKE(first, second, escape=None)[source]

Case insensitive like operator

INVERT(first)[source]
JOIN()[source]
LE(first, second=None)[source]
LEFT_JOIN()[source]
LENGTH(first)[source]
LIKE(first, second, escape=None)[source]

Case sensitive like operator

LOWER(first)[source]
LT(first, second=None)[source]
MOD(first, second)[source]
MUL(first, second)[source]
NE(first, second=None)[source]
NOT(first)[source]
NOT_NULL(default, field_type)[source]
ON(first, second)[source]
OR(first, second)[source]
PRIMARY_KEY(key)[source]
QUOTE_TEMPLATE = '"%s"'
RANDOM()[source]
RAW(first)[source]
REGEXP(first, second)[source]

Regular expression operator

REPLACE(first, tup)[source]
STARTSWITH(first, second)[source]
SUB(first, second)[source]
SUBSTRING(field, parameters)[source]
TRUE = 'T'
TRUE_exp = '1'
T_SEP = ' '
UPPER(first)[source]
adapt(obj)[source]
alias(table, alias)[source]

Given a table object, makes a new table object with alias name.

build_parsemap()[source]
bulk_insert(table, items)[source]
can_join()[source]
can_select_for_update = True
close_connection()[source]
commit()[source]
commit_on_alter_table = False
commit_prepared(key)[source]
common_filter(query, tablenames)[source]
concat_add(tablename)[source]
connection = None
connector(*args, **kwargs)
constraint_name(table, fieldname)[source]
count(query, distinct=None)[source]
create_sequence_and_triggers(query, table, **args)[source]
create_table(table, migrate=True, fake_migrate=False, polymodel=None)[source]
current_cursor_in_use = False
cursors_in_use = []
dbpath = None
delete(tablename, query)[source]
distributed_transaction_begin(key)[source]
driver = None
driver_auto_json = []
driver_name = None
drivers = ()
drop(table, mode='')[source]
execute(*a, **b)[source]
execute_test_query()[source]
expand(expression, field_type=None, colnames=False)[source]
expand_all(fields, tablenames)[source]
file_close(fileobj)[source]
file_delete(filename)[source]
file_exists(filename)[source]
file_open(filename, mode='rb', lock=True)[source]
find_driver(adapter_args, uri=None)[source]
folder = None
get_cursor()[source]
get_table(*queries)[source]
id_query(table)[source]
insert(table, fields)[source]
isOperationalError(exception)[source]
isProgrammingError(exception)[source]
is_numerical_type(ftype)[source]
iterparse(sql, fields, colnames, blob_decode=True, cacheable=False)[source]

Iterator to parse one row at a time. It doesn’t support the old style virtual fields.

iterselect(query, fields, attributes)[source]
lastrowid(table)[source]
like_escaper_default(term)[source]
log(message, table=None)[source]

Logs migrations

It will not log changes if a logfile is not specified. Defaults to sql.log

log_execute(*a, **b)[source]
migrate_table(table, sql_fields, sql_fields_old, sql_fields_aux, logfile, fake_migrate=False)[source]
parse(rows, fields, colnames, blob_decode=True, cacheable=False)[source]
parse_blob(value, field_type)[source]
parse_boolean(value, field_type)[source]
parse_date(value, field_type)[source]
parse_datetime(value, field_type)[source]
parse_decimal(value, field_type)[source]
parse_double(value, field_type)[source]
parse_id(value, field_type)[source]
parse_integer(value, field_type)[source]
parse_json(value, field_type)[source]
parse_list_integers(value, field_type)[source]
parse_list_references(value, field_type)[source]
parse_list_strings(value, field_type)[source]
parse_reference(value, field_type)[source]
parse_time(value, field_type)[source]
parse_value(value, field_type, blob_decode=True)[source]
prepare(key)[source]
represent(obj, fieldtype)[source]
represent_exceptions(obj, fieldtype)[source]
rollback()[source]
rollback_prepared(key)[source]
rowslice(rows, minimum=0, maximum=None)[source]

By default this function does nothing; overload when db does not do slicing.

save_dbt(table, sql_fields_current)[source]
select(query, fields, attributes)[source]

Always returns a Rows object, possibly empty.

select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)[source]
sequence_name(tablename)[source]
smart_adapt(obj)[source]
sqlsafe_field(fieldname)[source]
sqlsafe_table(tablename, ot=None)[source]
support_distributed_transaction = False
table_alias(tbl)[source]
tables(*queries)[source]
test_query = 'SELECT 1;'
trigger_name(tablename)[source]
truncate(table, mode=' ')[source]
types = {'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s %(null)s %(unique)s', 'text': 'TEXT', 'float': 'DOUBLE', 'datetime': 'TIMESTAMP', 'bigint': 'INTEGER', 'id': 'INTEGER PRIMARY KEY AUTOINCREMENT', 'reference FK': ', CONSTRAINT "FK_%(constraint_name)s" FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'json': 'TEXT', 'big-id': 'INTEGER PRIMARY KEY AUTOINCREMENT', 'blob': 'BLOB', 'big-reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s %(null)s %(unique)s', 'string': 'CHAR(%(length)s)', 'list:string': 'TEXT', 'date': 'DATE', 'integer': 'INTEGER', 'password': 'CHAR(%(length)s)', 'list:integer': 'TEXT', 'double': 'DOUBLE', 'decimal': 'DOUBLE', 'upload': 'CHAR(%(length)s)', 'list:reference': 'TEXT', 'boolean': 'CHAR(1)', 'time': 'TIME'}
update(tablename, query, fields)[source]
uploads_in_blob = False
varquote(name)[source]
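
These methods are not usually called directly: DAL expressions are expanded through them when SQL is generated. A minimal sketch of the mapping, assuming a SQLite-backed DAL (._select returns the SQL string without executing it):

from pydal import DAL, Field

db = DAL('sqlite:memory')
db.define_table('person', Field('name'), Field('age', 'integer'))

# the expression below is expanded by the adapter through GT, NE and AND
query = (db.person.age > 18) & (db.person.name != 'gumby')
print(db(query)._select(db.person.ALL))
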
class pydal.adapters.base.NoSQLAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None)[source]

Bases: pydal.adapters.base.BaseAdapter

ADD(first, second)[source]
AGGREGATE(first, what)[source]
AND(first, second)[source]
AS(first, second)[source]
DIV(first, second)[source]
ENDSWITH(first, second=None)[source]
EXTRACT(first, what)[source]
ILIKE(first, second)[source]
LEFT_JOIN()[source]
LENGTH(first)[source]
LOWER(first)[source]
MUL(first, second)[source]
ON(first, second)[source]
OR(first, second)[source]
PRIMARY_KEY(key)[source]
QUOTE_TEMPLATE = '%s'
RANDOM()[source]
STARTSWITH(first, second=None)[source]
SUB(first, second)[source]
SUBSTRING(field, parameters)[source]
UPPER(first)[source]
can_join()[source]
can_select_for_update = False
close_connection()[source]

remember: no transactions on many NoSQL

commit()[source]

remember: no transactions on many NoSQL

commit_prepared(key)[source]
concat_add(table)[source]
constraint_name(table, fieldname)[source]
create_sequence_and_triggers(query, table, **args)[source]
distributed_transaction_begin(key)[source]
drop(table, mode)[source]
execute(*a, **b)[source]
execute_test_query()[source]

NoSQL databases don’t have a universal query language. Override in the specific driver if you need to test the connection status. Throw an exception on failure.

id_query(table)[source]
lastrowid(table)[source]
log_execute(*a, **b)[source]
migrate_table(*a, **b)[source]
parse_list_integers(value, field_type)[source]
parse_list_references(value, field_type)[source]
parse_list_strings(value, field_type)[source]
prepare(key)[source]
represent(obj, fieldtype)[source]
represent_exceptions(obj, fieldtype)[source]
rollback()[source]

remember: no transactions on many NoSQL

rollback_prepared(key)[source]
rowslice(rows, minimum=0, maximum=None)[source]

pydal.adapters.couchdb module

class pydal.adapters.couchdb.CouchDBAdapter(db, uri='couchdb://127.0.0.1:5984', pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None)[source]

Bases: pydal.adapters.base.NoSQLAdapter

AND(first, second)[source]
COMMA(first, second)[source]
EQ(first, second)[source]
NE(first, second)[source]
OR(first, second)[source]
count(query, distinct=None)[source]
create_table(table, migrate=True, fake_migrate=False, polymodel=None)[source]
delete(tablename, query)[source]
drivers = ('couchdb',)
expand(expression, field_type=None)[source]
file_close(fileobj)[source]
file_exists(filename)[source]
file_open(filename, mode='rb', lock=True)[source]
insert(table, fields)[source]
represent(obj, fieldtype)[source]
select(query, fields, attributes)[source]
types = {'string': <type 'str'>, 'reference': <type 'long'>, 'text': <type 'str'>, 'id': <type 'long'>, 'float': <type 'float'>, 'bigint': <type 'long'>, 'upload': <type 'str'>, 'datetime': <type 'datetime.datetime'>, 'json': <type 'str'>, 'boolean': <type 'bool'>, 'blob': <type 'str'>, 'list:string': <type 'list'>, 'double': <type 'float'>, 'date': <type 'datetime.date'>, 'integer': <type 'long'>, 'password': <type 'str'>, 'list:integer': <type 'list'>, 'time': <type 'datetime.time'>, 'list:reference': <type 'list'>}
update(tablename, query, fields)[source]
uploads_in_blob = True
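
A minimal connection sketch, following the default URI in the signature above (the server address is illustrative; the couchdb driver must be installed):

from pydal import DAL, Field

db = DAL('couchdb://127.0.0.1:5984')
db.define_table('thing', Field('name'))
db.thing.insert(name='chair')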

pydal.adapters.cubrid module

class pydal.adapters.cubrid.CubridAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None)[source]

Bases: pydal.adapters.mysql.MySQLAdapter

REGEX_URI = <_sre.SRE_Pattern object at 0x1499450>
after_connection()[source]
drivers = ('cubriddb',)

pydal.adapters.db2 module

class pydal.adapters.db2.DB2Adapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None)[source]

Bases: pydal.adapters.base.BaseAdapter

LEFT_JOIN()[source]
RANDOM()[source]
drivers = ('ibm_db_dbi', 'pyodbc')
execute(command, placeholders=None)[source]
lastrowid(table)[source]
represent_exceptions(obj, fieldtype)[source]
rowslice(rows, minimum=0, maximum=None)[source]
select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)[source]
types = {'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'text': 'CLOB', 'float': 'REAL', 'datetime': 'TIMESTAMP', 'bigint': 'BIGINT', 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', 'id': 'INT GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL', 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'json': 'CLOB', 'big-id': 'BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL', 'blob': 'BLOB', 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'string': 'VARCHAR(%(length)s)', 'list:string': 'CLOB', 'date': 'DATE', 'integer': 'INT', 'password': 'VARCHAR(%(length)s)', 'list:integer': 'CLOB', 'double': 'DOUBLE', 'decimal': 'NUMERIC(%(precision)s,%(scale)s)', 'upload': 'VARCHAR(%(length)s)', 'list:reference': 'CLOB', 'boolean': 'CHAR(1)', 'time': 'TIME'}

pydal.adapters.firebird module

class pydal.adapters.firebird.FireBirdAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None)[source]

Bases: pydal.adapters.base.BaseAdapter

CONTAINS(first, second, case_sensitive=False)[source]
EPOCH(first)[source]
LENGTH(first)[source]
NOT_NULL(default, field_type)[source]
RANDOM()[source]
REGEX_URI = <_sre.SRE_Pattern object at 0x1578a20>
SUBSTRING(field, parameters)[source]
commit_on_alter_table = True
create_sequence_and_triggers(query, table, **args)[source]
drivers = ('kinterbasdb', 'firebirdsql', 'fdb', 'pyodbc')
lastrowid(table)[source]
select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)[source]
sequence_name(tablename)[source]
support_distributed_transaction = True
trigger_name(tablename)[source]
types = {'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'text': 'BLOB SUB_TYPE 1', 'float': 'FLOAT', 'datetime': 'TIMESTAMP', 'bigint': 'BIGINT', 'id': 'INTEGER PRIMARY KEY', 'json': 'BLOB SUB_TYPE 1', 'big-id': 'BIGINT PRIMARY KEY', 'blob': 'BLOB SUB_TYPE 0', 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'string': 'VARCHAR(%(length)s)', 'list:string': 'BLOB SUB_TYPE 1', 'date': 'DATE', 'integer': 'INTEGER', 'password': 'VARCHAR(%(length)s)', 'list:integer': 'BLOB SUB_TYPE 1', 'double': 'DOUBLE PRECISION', 'decimal': 'DECIMAL(%(precision)s,%(scale)s)', 'upload': 'VARCHAR(%(length)s)', 'list:reference': 'BLOB SUB_TYPE 1', 'boolean': 'CHAR(1)', 'time': 'TIME'}
class pydal.adapters.firebird.FireBirdEmbeddedAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None)[source]

Bases: pydal.adapters.firebird.FireBirdAdapter

REGEX_URI = <_sre.SRE_Pattern object at 0x14a9370>
drivers = ('kinterbasdb', 'firebirdsql', 'fdb', 'pyodbc')

pydal.adapters.google_adapters module

Adapter for GAE

pydal.adapters.imap module

class pydal.adapters.imap.IMAPAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None)[source]

Bases: pydal.adapters.base.NoSQLAdapter

IMAP server adapter

This class is intended as an interface with email IMAP servers to perform simple queries in the web2py DAL query syntax, so that email reading, searching and other related IMAP mail services (such as those implemented by brands like Google(r) and Yahoo!(r)) can be managed from web2py applications.

The code uses examples by Yuji Tomita from this post: http://yuji.wordpress.com/2011/06/22/python-imaplib-imap-example-with-gmail/#comment-1137 and is based on the docs for Python imaplib, Python email and the email IETF RFCs (i.e. RFC 2060 and RFC 3501).

This adapter was tested with a small set of operations with Gmail(r). Requests to other services could raise command syntax and response data issues.

It creates its table and field names “statically”, meaning that the developer should leave the table and field definitions to the DAL instance by calling the adapter’s .define_tables() method. The tables are defined with the IMAP server mailbox list information.

.define_tables() returns a dictionary mapping dal tablenames to the server mailbox names with the following structure:

{<tablename>: str <server mailbox name>}

Here is a list of supported fields:

Field        Type         Description
uid          string
answered     boolean      Flag
created      date
content      list:string  A list of dict text or html parts
to           string
cc           string
bcc          string
size         integer      the amount of octets of the message (*)
deleted      boolean      Flag
draft        boolean      Flag
flagged      boolean      Flag
sender       string
recent       boolean      Flag
seen         boolean      Flag
subject      string
mime         string       The mime header declaration
email        string       The complete RFC822 message (*)
attachments  list         Each non text part as dict
encoding     string       The main detected encoding

(*) At the application side it is measured as the length of the RFC822 message string

WARNING: As row ids are mapped to email sequence numbers, make sure your imap client web2py app does not delete messages during select or update actions, to prevent updating or deleting different messages. Sequence numbers change whenever the mailbox is updated. To avoid these sequence number issues, it is recommended to use uid fields in query references (although the rule of doing updates and deletes in separate actions still applies).

# This is the code recommended to start imap support
# at the app's model:

imapdb = DAL("imap://user:password@server:port", pool_size=1) # port 993 for ssl
imapdb.define_tables()

Here is an (incomplete) list of possible imap commands:

# Count today's unseen messages
# smaller than 6000 octets from the
# inbox mailbox

q = imapdb.INBOX.seen == False
q &= imapdb.INBOX.created == datetime.date.today()
q &= imapdb.INBOX.size < 6000
unread = imapdb(q).count()

# Fetch last query messages
rows = imapdb(q).select()

# it is also possible to filter query select results with limitby and
# sequences of mailbox fields

set.select(<fields sequence>, limitby=(<int>, <int>))

# Mark last query messages as seen
messages = [row.uid for row in rows]
seen = imapdb(imapdb.INBOX.uid.belongs(messages)).update(seen=True)

# Delete messages in the imap database that have mails from mr. Gumby

deleted = 0
for mailbox in imapdb.tables:
    deleted += imapdb(imapdb[mailbox].sender.contains("gumby")).delete()

# It is also possible to mark messages for deletion instead of erasing them
# directly with set.update(deleted=True)


# This object gives access
# to the adapter auto mailbox
# mapped names (which native
# mailbox has what table name)

imapdb.mailboxes <dict> # tablename, server native name pairs

# To retrieve a table native mailbox name use:
imapdb.<table>.mailbox

### New features v2.4.1:

# Declare mailboxes statically with tablename, name pairs
# This avoids the extra server names retrieval

imapdb.define_tables({"inbox": "INBOX"})

# Selects without content/attachments/email columns will only
# fetch header and flags

imapdb(q).select(imapdb.INBOX.sender, imapdb.INBOX.subject)
AND(first, second)[source]
BELONGS(first, second)[source]
CONTAINS(first, second, case_sensitive=False)[source]
EQ(first, second)[source]
GE(first, second)[source]
GT(first, second)[source]
LE(first, second)[source]
LT(first, second)[source]
NE(first, second=None)[source]
NOT(first)[source]
OR(first, second)[source]
REGEX_URI = <_sre.SRE_Pattern object at 0x157a850>
convert_date(date, add=None, imf=False)[source]
count(query, distinct=None)[source]
create_table(*args, **kwargs)[source]
dbengine = 'imap'
define_tables(mailbox_names=None)[source]

Auto-creates common IMAP fields

This function creates field definitions “statically”, meaning that custom fields (as in other adapters) are not supported and definitions are handled on a service/mode basis (e.g. local syntax for Gmail(r), Ymail(r)).

Returns a dictionary with tablename, server native mailbox name pairs.

delete(tablename, query)[source]
drivers = ('imaplib',)
encode_text(text, charset, errors='replace')[source]

convert text for mail to unicode

get_charset(message)[source]
get_last_message(tablename)[source]
get_mailboxes()[source]

Query the mail database for mailbox names

get_query_mailbox(query)[source]
get_uid_bounds(tablename)[source]
static header_represent(f, r)[source]
insert(table, fields)[source]
is_flag(flag)[source]
reconnect(f=None)[source]

IMAP4 Pool connection method

The imap connection lacks a self cursor command. A custom command should be provided as a replacement for connection pooling, to prevent an uncaught remote session closing.

select(query, fields, attributes)[source]

Searches and fetches records and returns web2py rows

types = {'boolean': <type 'bool'>, 'string': <type 'str'>, 'list:string': <type 'str'>, 'integer': <type 'int'>, 'date': <type 'datetime.date'>, 'text': <type 'str'>, 'blob': <type 'str'>, 'bigint': <type 'long'>, 'id': <type 'long'>, 'datetime': <type 'datetime.datetime'>}
update(tablename, query, fields)[source]
uri = None

MESSAGE is an identifier for sequence number

pydal.adapters.informix module

class pydal.adapters.informix.InformixAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None)[source]

Bases: pydal.adapters.base.BaseAdapter

NOT_NULL(default, field_type)[source]
RANDOM()[source]
REGEX_URI = <_sre.SRE_Pattern object at 0x1481820>
drivers = ('informixdb',)
execute(command)[source]
lastrowid(table)[source]
represent_exceptions(obj, fieldtype)[source]
select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)[source]
types = {'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'text': 'BLOB SUB_TYPE 1', 'float': 'FLOAT', 'datetime': 'DATETIME', 'bigint': 'BIGINT', 'reference TFK': 'FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s CONSTRAINT TFK_%(table_name)s_%(field_name)s', 'id': 'SERIAL', 'reference FK': 'REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s CONSTRAINT FK_%(table_name)s_%(field_name)s', 'json': 'BLOB SUB_TYPE 1', 'big-id': 'BIGSERIAL', 'blob': 'BLOB SUB_TYPE 0', 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'string': 'VARCHAR(%(length)s)', 'list:string': 'BLOB SUB_TYPE 1', 'date': 'DATE', 'integer': 'INTEGER', 'password': 'VARCHAR(%(length)s)', 'list:integer': 'BLOB SUB_TYPE 1', 'double': 'DOUBLE PRECISION', 'decimal': 'NUMERIC(%(precision)s,%(scale)s)', 'upload': 'VARCHAR(%(length)s)', 'list:reference': 'BLOB SUB_TYPE 1', 'boolean': 'CHAR(1)', 'time': 'CHAR(8)'}
class pydal.adapters.informix.InformixSEAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None)[source]

Bases: pydal.adapters.informix.InformixAdapter

work in progress

rowslice(rows, minimum=0, maximum=None)[source]
select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)[source]

pydal.adapters.ingres module

class pydal.adapters.ingres.IngresAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None)[source]

Bases: pydal.adapters.base.BaseAdapter

LEFT_JOIN()[source]
RANDOM()[source]
create_sequence_and_triggers(query, table, **args)[source]
drivers = ('pyodbc',)
lastrowid(table)[source]
select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)[source]
types = {'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'text': 'CLOB', 'float': 'FLOAT', 'datetime': 'TIMESTAMP WITHOUT TIME ZONE', 'bigint': 'BIGINT', 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', 'id': 'int not null unique with default next value for ii***lineitemsequence', 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'json': 'CLOB', 'big-id': 'bigint not null unique with default next value for ii***lineitemsequence', 'blob': 'BLOB', 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'string': 'VARCHAR(%(length)s)', 'list:string': 'CLOB', 'date': 'ANSIDATE', 'integer': 'INTEGER4', 'password': 'VARCHAR(%(length)s)', 'list:integer': 'CLOB', 'double': 'FLOAT8', 'decimal': 'NUMERIC(%(precision)s,%(scale)s)', 'upload': 'VARCHAR(%(length)s)', 'list:reference': 'CLOB', 'boolean': 'CHAR(1)', 'time': 'TIME WITHOUT TIME ZONE'}
class pydal.adapters.ingres.IngresUnicodeAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None)[source]

Bases: pydal.adapters.ingres.IngresAdapter

drivers = ('pyodbc',)
types = {'reference': 'INTEGER4, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'text': 'NCLOB', 'float': 'FLOAT', 'datetime': 'TIMESTAMP WITHOUT TIME ZONE', 'bigint': 'BIGINT', 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', 'id': 'INTEGER4 not null unique with default next value for ii***lineitemsequence', 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'json': 'NCLOB', 'big-id': 'BIGINT not null unique with default next value for ii***lineitemsequence', 'blob': 'BLOB', 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'string': 'NVARCHAR(%(length)s)', 'list:string': 'NCLOB', 'date': 'ANSIDATE', 'integer': 'INTEGER4', 'password': 'NVARCHAR(%(length)s)', 'list:integer': 'NCLOB', 'double': 'FLOAT8', 'decimal': 'NUMERIC(%(precision)s,%(scale)s)', 'upload': 'VARCHAR(%(length)s)', 'list:reference': 'NCLOB', 'boolean': 'CHAR(1)', 'time': 'TIME WITHOUT TIME ZONE'}

pydal.adapters.mongo module

class pydal.adapters.mongo.Binary[source]

Bases: object

class pydal.adapters.mongo.MongoBlob[source]

Bases: pydal.adapters.mongo.Binary

MONGO_BLOB_BYTES = 0
MONGO_BLOB_NON_UTF8_STR = 1
static decode(value)[source]
class pydal.adapters.mongo.MongoDBAdapter(db, uri='mongodb://127.0.0.1:5984/db', pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None)[source]

Bases: pydal.adapters.base.NoSQLAdapter

ADD(first, *args, **kwargs)[source]
AGGREGATE(first, *args, **kwargs)[source]
AND(first, second)[source]
AS(first, *args, **kwargs)[source]
AS_MARK = '__#AS#__'
BELONGS(first, second)[source]
CMP_OPS_AGGREGATION_PIPELINE(op, first, second)[source]
COALESCE(first, *args, **kwargs)[source]
COMMA(first, second)[source]
CONTAINS(first, second, case_sensitive=True)[source]
COUNT(first, *args, **kwargs)[source]
DIV(first, *args, **kwargs)[source]
ENDSWITH(first, second)[source]
EPOCH(first, *args, **kwargs)[source]
EQ(first, second=None, *args, **kwargs)[source]
EXPAND_CASE(first, *args, **kwargs)[source]
EXTRACT(first, *args, **kwargs)[source]
class Expanded(adapter, crud, query, fields=(), tablename=None, groupby=None, distinct=False, having=None)[source]

Bases: object

Class to encapsulate a pydal expression and track the parse expansion and its results.

Two different MongoDB mechanisms are targeted here. If the query is sufficiently simple, then simple queries are generated. The bulk of the complexity here is however to support more complex queries that are targeted to the MongoDB Aggregation Pipeline.

This class supports four operations: ‘count’, ‘select’, ‘update’ and ‘delete’.

Behavior varies somewhat for each operation type. However building each pipeline stage is shared where the behavior is the same (or similar) for the different operations.

In general an attempt is made to build the query without using the pipeline, and if that fails then the query is rebuilt with the pipeline.

QUERY constructed in _build_pipeline_query():
    $project : used to calculate expressions if needed
    $match   : filters out records

FIELDS constructed in _expand_fields():
    FIELDS:COUNT
        $group   : filter for distinct if needed
        $group   : count the records remaining
    FIELDS:SELECT
        $group   : implement aggregations if needed
        $project : implement expressions (etc) for select
    FIELDS:UPDATE
        $project : implement expressions (etc) for update

HAVING constructed in _add_having():
    $project : used to calculate expressions
    $match   : filters out records
    $project : used to filter out previous expression fields
annotate_expression(expression)[source]
get_collection(safe=None)[source]
MongoDBAdapter.GE(*args, **kwargs)[source]
MongoDBAdapter.GROUP_MARK = '__#GROUP#__'
MongoDBAdapter.GT(*args, **kwargs)[source]
MongoDBAdapter.ILIKE(first, second, escape=None)[source]
MongoDBAdapter.INVERT(first)[source]
MongoDBAdapter.LE(*args, **kwargs)[source]
MongoDBAdapter.LENGTH(first)[source]

https://jira.mongodb.org/browse/SERVER-5319 https://github.com/afchin/mongo/commit/f52105977e4d0ccb53bdddfb9c4528a3f3c40bdf

MongoDBAdapter.LIKE(first, second, case_sensitive=True, escape=None)[source]
MongoDBAdapter.LOWER(first, *args, **kwargs)[source]
MongoDBAdapter.LT(*args, **kwargs)[source]
MongoDBAdapter.MOD(first, *args, **kwargs)[source]
MongoDBAdapter.MUL(first, *args, **kwargs)[source]
MongoDBAdapter.NE(first, second=None, *args, **kwargs)[source]
MongoDBAdapter.NOT(first)[source]
exception MongoDBAdapter.NotOnNoSqlError(message=None)[source]

Bases: exceptions.NotImplementedError

MongoDBAdapter.ON(first, second)[source]
MongoDBAdapter.OR(first, second)[source]
MongoDBAdapter.RANDOM()[source]

ORDER BY RANDOM()

https://github.com/mongodb/cookbook/blob/master/content/patterns/random-attribute.txt https://jira.mongodb.org/browse/SERVER-533 http://stackoverflow.com/questions/19412/how-to-request-a-random-row-in-sql

MongoDBAdapter.REGEXP(first, second, case_sensitive=True)[source]

MongoDB provides regular expression capabilities for pattern matching strings in queries. MongoDB uses Perl compatible regular expressions (i.e. ‘PCRE’) version 8.36 with UTF-8 support.

MongoDBAdapter.REGEXP_MARK1 = '__#REGEXP_1#__'
MongoDBAdapter.REGEXP_MARK2 = '__#REGEXP_2#__'
MongoDBAdapter.REGEX_SELECT_AS_PARSER = <_sre.SRE_Pattern object>
MongoDBAdapter.STARTSWITH(first, second)[source]
MongoDBAdapter.SUB(first, *args, **kwargs)[source]
MongoDBAdapter.SUBSTRING(first, *args, **kwargs)[source]
MongoDBAdapter.UPPER(first, *args, **kwargs)[source]
MongoDBAdapter.bulk_insert(table, items)[source]
MongoDBAdapter.check_fields_for_cmp(f)[source]
MongoDBAdapter.check_notnull(table, values)[source]
MongoDBAdapter.check_unique(table, values)[source]
MongoDBAdapter.count(query, distinct=None, snapshot=True)[source]
MongoDBAdapter.create_table(table, migrate=True, fake_migrate=False, polymodel=None)[source]
MongoDBAdapter.delete(tablename, query, safe=None)[source]
MongoDBAdapter.driver_auto_json = ['loads', 'dumps']
MongoDBAdapter.drivers = ('pymongo',)
MongoDBAdapter.drop(table, mode='')[source]
MongoDBAdapter.expand(expression, field_type=None)[source]
static MongoDBAdapter.has_field(expression)[source]
MongoDBAdapter.insert(table, fields, safe=None)[source]

safe determines whether an asynchronous request or a synchronous action is performed. For safety, synchronous requests are used by default.

MongoDBAdapter.needs_mongodb_aggregation_pipeline(f)[source]
MongoDBAdapter.object_id(arg=None)[source]

Convert input to a valid MongoDB ObjectId instance

self.object_id(“<random>”) -> ObjectId (not unique) instance

MongoDBAdapter.parse_blob(value, field_type)[source]
static MongoDBAdapter.parse_data(expression, attribute, value=None)[source]
MongoDBAdapter.parse_id(value, field_type)[source]
MongoDBAdapter.parse_reference(value, field_type)[source]
MongoDBAdapter.represent(obj, fieldtype)[source]
MongoDBAdapter.select(query, fields, attributes, snapshot=False)[source]
MongoDBAdapter.truncate(table, mode, safe=None)[source]
MongoDBAdapter.types = {'string': <type 'str'>, 'reference': <type 'long'>, 'text': <type 'str'>, 'id': <type 'long'>, 'float': <type 'float'>, 'bigint': <type 'long'>, 'upload': <type 'str'>, 'datetime': <type 'datetime.datetime'>, 'json': <type 'str'>, 'boolean': <type 'bool'>, 'blob': <type 'str'>, 'list:string': <type 'list'>, 'double': <type 'float'>, 'date': <type 'datetime.date'>, 'integer': <type 'long'>, 'password': <type 'str'>, 'list:integer': <type 'list'>, 'time': <type 'datetime.time'>, 'list:reference': <type 'list'>}
MongoDBAdapter.update(tablename, query, fields, safe=None)[source]
MongoDBAdapter.uploads_in_blob = False
MongoDBAdapter.validate_second(f)[source]
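
A minimal connection sketch (host, port and database name are illustrative; the pymongo driver must be installed):

from pydal import DAL, Field

db = DAL('mongodb://127.0.0.1:27017/mydb')
db.define_table('message', Field('body'), Field('seen', 'boolean'))
db.message.insert(body='hello', seen=False)

# sufficiently simple queries run as plain MongoDB queries; more complex
# expressions are rebuilt for the aggregation pipeline (see Expanded above)
rows = db(db.message.seen == False).select()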

pydal.adapters.mssql module

class pydal.adapters.mssql.MSSQL2Adapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQLAdapter

ILIKE(first, second, escape=None)[source]

Case insensitive like operator

drivers = ('pyodbc',)
execute(*a, **b)[source]
represent(obj, fieldtype)[source]
types = {'reference': 'INT %(null)s %(unique)s, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'text': 'NTEXT', 'float': 'FLOAT', 'datetime': 'DATETIME', 'bigint': 'BIGINT', 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', 'id': 'INT IDENTITY PRIMARY KEY', 'geography': 'geography', 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'big-id': 'BIGINT IDENTITY PRIMARY KEY', 'json': 'NTEXT', 'blob': 'IMAGE', 'big-reference': 'BIGINT %(null)s %(unique)s, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'string': 'NVARCHAR(%(length)s)', 'list:string': 'NTEXT', 'date': 'DATETIME', 'integer': 'INT', 'password': 'NVARCHAR(%(length)s)', 'list:integer': 'NTEXT', 'geometry': 'geometry', 'double': 'FLOAT', 'decimal': 'NUMERIC(%(precision)s,%(scale)s)', 'upload': 'NVARCHAR(%(length)s)', 'list:reference': 'NTEXT', 'boolean': 'BIT', 'time': 'CHAR(8)'}
class pydal.adapters.mssql.MSSQL3Adapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQLAdapter

Experimental support for pagination in MSSQL

Requires MSSQL >= 2005, uses ROW_NUMBER()

rowslice(rows, minimum=0, maximum=None)[source]
select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)[source]
types = {'reference': 'INT %(null)s %(unique)s, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'text': 'VARCHAR(MAX)', 'float': 'FLOAT', 'datetime': 'DATETIME', 'bigint': 'BIGINT', 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', 'id': 'INT IDENTITY PRIMARY KEY', 'geography': 'geography', 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'big-id': 'BIGINT IDENTITY PRIMARY KEY', 'json': 'VARCHAR(MAX)', 'blob': 'IMAGE', 'big-reference': 'BIGINT %(null)s %(unique)s, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'string': 'VARCHAR(%(length)s)', 'list:string': 'VARCHAR(MAX)', 'date': 'DATETIME', 'integer': 'INT', 'password': 'VARCHAR(%(length)s)', 'list:integer': 'VARCHAR(MAX)', 'geometry': 'geometry', 'double': 'FLOAT', 'decimal': 'NUMERIC(%(precision)s,%(scale)s)', 'upload': 'VARCHAR(%(length)s)', 'list:reference': 'VARCHAR(MAX)', 'boolean': 'BIT', 'time': 'TIME(7)'}
class pydal.adapters.mssql.MSSQL3NAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQLNAdapter

drivers = ('pyodbc',)

Experimental support for pagination in MSSQL; see the MSSQLNAdapter docstring for warnings.

Requires MSSQL >= 2005, uses ROW_NUMBER()

rowslice(rows, minimum=0, maximum=None)[source]
select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)[source]
types = {'reference': 'INT %(null)s %(unique)s, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'text': 'NVARCHAR(MAX)', 'float': 'FLOAT', 'datetime': 'DATETIME', 'bigint': 'BIGINT', 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', 'id': 'INT IDENTITY PRIMARY KEY', 'geography': 'geography', 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'big-id': 'BIGINT IDENTITY PRIMARY KEY', 'json': 'NVARCHAR(MAX)', 'blob': 'IMAGE', 'big-reference': 'BIGINT %(null)s %(unique)s, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'string': 'NVARCHAR(%(length)s)', 'list:string': 'NVARCHAR(MAX)', 'date': 'DATETIME', 'integer': 'INT', 'password': 'NVARCHAR(%(length)s)', 'list:integer': 'NVARCHAR(MAX)', 'geometry': 'geometry', 'double': 'FLOAT', 'decimal': 'NUMERIC(%(precision)s,%(scale)s)', 'upload': 'NVARCHAR(%(length)s)', 'list:reference': 'NVARCHAR(MAX)', 'boolean': 'BIT', 'time': 'TIME(7)'}
class pydal.adapters.mssql.MSSQL4Adapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQLAdapter

Support for “native” pagination

Requires MSSQL >= 2012, uses OFFSET ... ROWS ... FETCH NEXT ... ROWS ONLY

rowslice(rows, minimum=0, maximum=None)[source]
select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)[source]
types = {'reference': 'INT %(null)s %(unique)s, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'text': 'VARCHAR(MAX)', 'float': 'FLOAT', 'datetime': 'DATETIME', 'bigint': 'BIGINT', 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', 'id': 'INT IDENTITY PRIMARY KEY', 'geography': 'geography', 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'big-id': 'BIGINT IDENTITY PRIMARY KEY', 'json': 'VARCHAR(MAX)', 'blob': 'IMAGE', 'big-reference': 'BIGINT %(null)s %(unique)s, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'string': 'VARCHAR(%(length)s)', 'list:string': 'VARCHAR(MAX)', 'date': 'DATETIME', 'integer': 'INT', 'password': 'VARCHAR(%(length)s)', 'list:integer': 'VARCHAR(MAX)', 'geometry': 'geometry', 'double': 'FLOAT', 'decimal': 'NUMERIC(%(precision)s,%(scale)s)', 'upload': 'VARCHAR(%(length)s)', 'list:reference': 'VARCHAR(MAX)', 'boolean': 'BIT', 'time': 'TIME(7)'}
class pydal.adapters.mssql.MSSQL4NAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQLNAdapter

Experimental: see the MSSQLNAdapter docstring for warnings. Support for “native” pagination.

Unicode-compatible version. Requires MSSQL >= 2012; uses OFFSET ... ROWS ... FETCH NEXT ... ROWS ONLY. After careful testing, this should be the de facto adapter for recent MSSQL backends.

rowslice(rows, minimum=0, maximum=None)[source]
select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)[source]
types = {'reference': 'INT %(null)s %(unique)s, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'text': 'NVARCHAR(MAX)', 'float': 'FLOAT', 'datetime': 'DATETIME', 'bigint': 'BIGINT', 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', 'id': 'INT IDENTITY PRIMARY KEY', 'geography': 'geography', 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'big-id': 'BIGINT IDENTITY PRIMARY KEY', 'json': 'NVARCHAR(MAX)', 'blob': 'IMAGE', 'big-reference': 'BIGINT %(null)s %(unique)s, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'string': 'NVARCHAR(%(length)s)', 'list:string': 'NVARCHAR(MAX)', 'date': 'DATE', 'integer': 'INT', 'password': 'NVARCHAR(%(length)s)', 'list:integer': 'NVARCHAR(MAX)', 'geometry': 'geometry', 'double': 'FLOAT', 'decimal': 'NUMERIC(%(precision)s,%(scale)s)', 'upload': 'NVARCHAR(%(length)s)', 'list:reference': 'NVARCHAR(MAX)', 'boolean': 'BIT', 'time': 'TIME(7)'}
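
With the MSSQL >= 2012 adapters, a limitby select is rendered with native OFFSET ... FETCH NEXT pagination; a sketch (URI and credentials are illustrative):

from pydal import DAL, Field

db = DAL('mssql4://user:password@host/database')
db.define_table('item', Field('name'))

# rows 10..19; rendered as OFFSET 10 ROWS FETCH NEXT 10 ROWS ONLY
# (the MSSQL >= 2005 adapters emulate the same thing with ROW_NUMBER())
rows = db(db.item.id > 0).select(orderby=db.item.id, limitby=(10, 20))
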
class pydal.adapters.mssql.MSSQLAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.base.BaseAdapter

AGGREGATE(first, what)[source]
ALLOW_NULL()[source]
CAST(first, second)[source]
CONCAT(*items)[source]
CONTAINS(first, second, case_sensitive=True)[source]
ENDSWITH(first, second)[source]
EPOCH(first)[source]
EXTRACT(field, what)[source]
FALSE = 0
ILIKE(first, second, escape=None)[source]

Case insensitive like operator

LEFT_JOIN()[source]
LENGTH(first)[source]
LIKE(first, second, escape=None)[source]

Case sensitive like operator

PRIMARY_KEY(key)[source]
QUOTE_TEMPLATE = '"%s"'
RANDOM()[source]
REGEXP(first, second)[source]
REGEX_ARGPATTERN = <_sre.SRE_Pattern object>
REGEX_DSN = <_sre.SRE_Pattern object>
REGEX_URI = <_sre.SRE_Pattern object at 0x1481d90>
STARTSWITH(first, second)[source]
ST_ASTEXT(first)[source]
ST_CONTAINS(first, second)[source]
ST_DISTANCE(first, second)[source]
ST_EQUALS(first, second)[source]
ST_INTERSECTS(first, second)[source]
ST_OVERLAPS(first, second)[source]
ST_TOUCHES(first, second)[source]
ST_WITHIN(first, second)[source]
SUBSTRING(field, parameters)[source]
TRUE = 1
T_SEP = 'T'
concat_add(tablename)[source]
drivers = ('pyodbc',)
lastrowid(table)[source]
like_escaper_default(term)[source]
mssql_like_normalizer(term)[source]
represent(obj, fieldtype)[source]
rowslice(rows, minimum=0, maximum=None)[source]
select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)[source]
types = {'reference': 'INT %(null)s %(unique)s, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'text': 'TEXT', 'float': 'FLOAT', 'datetime': 'DATETIME', 'bigint': 'BIGINT', 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', 'id': 'INT IDENTITY PRIMARY KEY', 'geography': 'geography', 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'big-id': 'BIGINT IDENTITY PRIMARY KEY', 'json': 'TEXT', 'blob': 'IMAGE', 'big-reference': 'BIGINT %(null)s %(unique)s, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'string': 'VARCHAR(%(length)s)', 'list:string': 'TEXT', 'date': 'DATETIME', 'integer': 'INT', 'password': 'VARCHAR(%(length)s)', 'list:integer': 'TEXT', 'geometry': 'geometry', 'double': 'FLOAT', 'decimal': 'NUMERIC(%(precision)s,%(scale)s)', 'upload': 'VARCHAR(%(length)s)', 'list:reference': 'TEXT', 'boolean': 'BIT', 'time': 'CHAR(8)'}
varquote(name)[source]
class pydal.adapters.mssql.MSSQLNAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQLAdapter

ILIKE(first, second, escape=None)[source]

Case insensitive like operator

drivers = ('pyodbc',)

Experimental: base class for handling unicode in MSSQL by default. Needs lots of testing. Try this on a fresh (or on a legacy) database. Using this on a database previously handled with a non-unicode-aware adapter is NOT supported.

execute(*a, **b)[source]
represent(obj, fieldtype)[source]
types = {'reference': 'INT %(null)s %(unique)s, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'text': 'NTEXT', 'float': 'FLOAT', 'datetime': 'DATETIME', 'bigint': 'BIGINT', 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', 'id': 'INT IDENTITY PRIMARY KEY', 'geography': 'geography', 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'big-id': 'BIGINT IDENTITY PRIMARY KEY', 'json': 'NTEXT', 'blob': 'IMAGE', 'big-reference': 'BIGINT %(null)s %(unique)s, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'string': 'NVARCHAR(%(length)s)', 'list:string': 'NTEXT', 'date': 'DATETIME', 'integer': 'INT', 'password': 'NVARCHAR(%(length)s)', 'list:integer': 'NTEXT', 'geometry': 'geometry', 'double': 'FLOAT', 'decimal': 'NUMERIC(%(precision)s,%(scale)s)', 'upload': 'NVARCHAR(%(length)s)', 'list:reference': 'NTEXT', 'boolean': 'BIT', 'time': 'CHAR(8)'}
class pydal.adapters.mssql.SybaseAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQLAdapter

drivers = 'Sybase'
types = {'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'text': 'TEXT', 'float': 'FLOAT', 'datetime': 'DATETIME', 'bigint': 'BIGINT', 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', 'id': 'INT IDENTITY PRIMARY KEY', 'geography': 'geography', 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'big-id': 'BIGINT IDENTITY PRIMARY KEY', 'json': 'TEXT', 'blob': 'IMAGE', 'big-reference': 'BIGINT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'string': 'CHAR VARYING(%(length)s)', 'list:string': 'TEXT', 'date': 'DATETIME', 'integer': 'INT', 'password': 'CHAR VARYING(%(length)s)', 'list:integer': 'TEXT', 'geometry': 'geometry', 'double': 'FLOAT', 'decimal': 'NUMERIC(%(precision)s,%(scale)s)', 'upload': 'CHAR VARYING(%(length)s)', 'list:reference': 'TEXT', 'boolean': 'BIT', 'time': 'CHAR(8)'}
class pydal.adapters.mssql.VerticaAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQLAdapter

EXTRACT(first, what)[source]
T_SEP = ' '
drivers = ('pyodbc',)
execute(a)[source]
lastrowid(table)[source]
select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)[source]
types = {'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'string': 'VARCHAR(%(length)s)', 'reference': 'INT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'text': 'BYTEA', 'decimal': 'DECIMAL(%(precision)s,%(scale)s)', 'float': 'FLOAT', 'bigint': 'BIGINT', 'upload': 'VARCHAR(%(length)s)', 'datetime': 'DATETIME', 'json': 'VARCHAR(%(length)s)', 'boolean': 'BOOLEAN', 'id': 'IDENTITY', 'blob': 'BYTEA', 'list:string': 'BYTEA', 'double': 'DOUBLE PRECISION', 'date': 'DATE', 'integer': 'INT', 'password': 'VARCHAR(%(length)s)', 'list:integer': 'BYTEA', 'time': 'TIME', 'list:reference': 'BYTEA'}

pydal.adapters.mysql module

class pydal.adapters.mysql.MySQLAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None)[source]

Bases: pydal.adapters.base.BaseAdapter

CAST(first, second)[source]
CONCAT(*items)[source]
EPOCH(first)[source]
QUOTE_TEMPLATE = '`%s`'
RANDOM()[source]
REGEXP(first, second)[source]
REGEX_URI = <_sre.SRE_Pattern object at 0x1499450>
SUBSTRING(field, parameters)[source]
after_connection()[source]
commit_on_alter_table = True
commit_prepared(key)[source]
distributed_transaction_begin(key)[source]
drivers = ('MySQLdb', 'pymysql', 'mysqlconnector')
prepare(key)[source]
rollback_prepared(key)[source]
support_distributed_transaction = True
types = {'reference': 'INT %(null)s %(unique)s, INDEX %(index_name)s (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'text': 'LONGTEXT', 'float': 'FLOAT', 'datetime': 'DATETIME', 'bigint': 'BIGINT', 'id': 'INT AUTO_INCREMENT NOT NULL', 'reference FK': ', CONSTRAINT `FK_%(constraint_name)s` FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'json': 'LONGTEXT', 'big-id': 'BIGINT AUTO_INCREMENT NOT NULL', 'blob': 'LONGBLOB', 'big-reference': 'BIGINT %(null)s %(unique)s, INDEX %(index_name)s (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'string': 'VARCHAR(%(length)s)', 'list:string': 'LONGTEXT', 'date': 'DATE', 'integer': 'INT', 'password': 'VARCHAR(%(length)s)', 'list:integer': 'LONGTEXT', 'double': 'DOUBLE', 'decimal': 'NUMERIC(%(precision)s,%(scale)s)', 'upload': 'VARCHAR(%(length)s)', 'list:reference': 'LONGTEXT', 'boolean': 'CHAR(1)', 'time': 'TIME'}
varquote(name)[source]
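
MySQLAdapter sets support_distributed_transaction = True, so two connections can take part in a two-phase commit through the prepare/commit_prepared hooks above; a sketch (URIs are illustrative):

from pydal import DAL

db_a = DAL('mysql://user:password@host1/test')
db_b = DAL('mysql://user:password@host2/test')

# ... inserts/updates on both connections ...

# commits both transactions, or neither
DAL.distributed_transaction_commit(db_a, db_b)
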

pydal.adapters.oracle module

class pydal.adapters.oracle.OracleAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None)[source]

Bases: pydal.adapters.base.BaseAdapter

LEFT_JOIN()[source]
NOT_NULL(default, field_type)[source]
RANDOM()[source]
REGEXP(first, second)[source]
after_connection()[source]
commit_on_alter_table = False
constraint_name(tablename, fieldname)[source]
create_sequence_and_triggers(query, table, **args)[source]
drivers = ('cx_Oracle',)
execute(command, args=None)[source]
insert(table, fields)[source]
lastrowid(table)[source]
oracle_fix = <_sre.SRE_Pattern object>
represent_exceptions(obj, fieldtype)[source]
select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)[source]
sqlsafe_table(tablename, ot=None)[source]
trigger_name(tablename)[source]
types = {'reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'text': 'CLOB', 'float': 'FLOAT', 'datetime': 'DATE', 'bigint': 'NUMBER', 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', 'id': 'NUMBER PRIMARY KEY', 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'json': 'CLOB', 'big-id': 'NUMBER PRIMARY KEY', 'blob': 'CLOB', 'big-reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'string': 'VARCHAR2(%(length)s)', 'list:string': 'CLOB', 'date': 'DATE', 'integer': 'INT', 'password': 'VARCHAR2(%(length)s)', 'list:integer': 'CLOB', 'double': 'BINARY_DOUBLE', 'decimal': 'NUMERIC(%(precision)s,%(scale)s)', 'upload': 'VARCHAR2(%(length)s)', 'list:reference': 'CLOB', 'boolean': 'CHAR(1)', 'time': 'CHAR(8)'}

pydal.adapters.postgres module

class pydal.adapters.postgres.JDBCPostgreSQLAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None)[source]

Bases: pydal.adapters.postgres.PostgreSQLAdapter

REGEX_URI = <_sre.SRE_Pattern object at 0x1481820>
after_connection()[source]
drivers = ('zxJDBC',)
class pydal.adapters.postgres.NewPostgreSQLAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.postgres.PostgreSQLAdapter

ANY(first)[source]
CONTAINS(first, second, case_sensitive=True)[source]
EQ(first, second=None)[source]
ILIKE(first, second, escape=None)[source]
drivers = ('psycopg2', 'pg8000')
parse_list_integers(value, field_type)[source]
parse_list_references(value, field_type)[source]
parse_list_strings(value, field_type)[source]
represent(obj, fieldtype)[source]
types = {'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s %(null)s %(unique)s', 'text': 'TEXT', 'float': 'FLOAT', 'datetime': 'TIMESTAMP', 'bigint': 'BIGINT', 'reference TFK': ' CONSTRAINT "FK_%(foreign_table)s_PK" FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', 'id': 'SERIAL PRIMARY KEY', 'geography': 'GEOGRAPHY', 'reference FK': ', CONSTRAINT "FK_%(constraint_name)s" FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'big-id': 'BIGSERIAL PRIMARY KEY', 'json': 'TEXT', 'blob': 'BYTEA', 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s %(null)s %(unique)s', 'string': 'VARCHAR(%(length)s)', 'list:string': 'TEXT[]', 'date': 'DATE', 'integer': 'INTEGER', 'password': 'VARCHAR(%(length)s)', 'list:integer': 'BIGINT[]', 'geometry': 'GEOMETRY', 'double': 'FLOAT8', 'decimal': 'NUMERIC(%(precision)s,%(scale)s)', 'upload': 'VARCHAR(%(length)s)', 'list:reference': 'BIGINT[]', 'boolean': 'CHAR(1)', 'time': 'TIME'}
class pydal.adapters.postgres.PostgreSQLAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.base.BaseAdapter

ADD(first, second)[source]
ILIKE(first, second, escape=None)[source]

Case insensitive like operator

LIKE(first, second, escape=None)[source]

Case sensitive like operator

QUOTE_TEMPLATE = '"%s"'
RANDOM()[source]
REGEXP(first, second)[source]
REGEX_URI = <_sre.SRE_Pattern object at 0x14916c0>
ST_ASGEOJSON(first, second)[source]

http://postgis.org/docs/ST_AsGeoJSON.html

ST_ASTEXT(first)[source]

http://postgis.org/docs/ST_AsText.html

ST_CONTAINS(first, second)[source]

http://postgis.org/docs/ST_Contains.html

ST_DISTANCE(first, second)[source]

http://postgis.org/docs/ST_Distance.html

ST_DWITHIN(first, tup)[source]

http://postgis.org/docs/ST_DWithin.html

ST_EQUALS(first, second)[source]

http://postgis.org/docs/ST_Equals.html

ST_INTERSECTS(first, second)[source]

http://postgis.org/docs/ST_Intersects.html

ST_OVERLAPS(first, second)[source]

http://postgis.org/docs/ST_Overlaps.html

ST_SIMPLIFY(first, second)[source]

http://postgis.org/docs/ST_Simplify.html

ST_SIMPLIFYPRESERVETOPOLOGY(first, second)[source]

http://postgis.org/docs/ST_SimplifyPreserveTopology.html

ST_TOUCHES(first, second)[source]

http://postgis.org/docs/ST_Touches.html

ST_WITHIN(first, second)[source]

http://postgis.org/docs/ST_Within.html

ST_X(first)[source]

http://postgis.org/docs/ST_X.html

ST_Y(first)[source]

http://postgis.org/docs/ST_Y.html

adapt(obj)[source]
after_connection()[source]
commit_prepared(key)[source]
create_sequence_and_triggers(query, table, **args)[source]
distributed_transaction_begin(key)[source]
drivers = ('psycopg2', 'pg8000')
execute(*a, **b)[source]
lastrowid(table=None)[source]
prepare(key)[source]
represent(obj, fieldtype)[source]
rollback_prepared(key)[source]
sequence_name(table)[source]
support_distributed_transaction = True
try_json()[source]
types = {'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s %(null)s %(unique)s', 'text': 'TEXT', 'float': 'FLOAT', 'datetime': 'TIMESTAMP', 'bigint': 'BIGINT', 'reference TFK': ' CONSTRAINT "FK_%(foreign_table)s_PK" FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', 'id': 'SERIAL PRIMARY KEY', 'geography': 'GEOGRAPHY', 'reference FK': ', CONSTRAINT "FK_%(constraint_name)s" FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'big-id': 'BIGSERIAL PRIMARY KEY', 'json': 'TEXT', 'blob': 'BYTEA', 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s %(null)s %(unique)s', 'string': 'VARCHAR(%(length)s)', 'list:string': 'TEXT', 'date': 'DATE', 'integer': 'INTEGER', 'password': 'VARCHAR(%(length)s)', 'list:integer': 'TEXT', 'geometry': 'GEOMETRY', 'double': 'FLOAT8', 'decimal': 'NUMERIC(%(precision)s,%(scale)s)', 'upload': 'VARCHAR(%(length)s)', 'list:reference': 'TEXT', 'boolean': 'CHAR(1)', 'time': 'TIME'}
varquote(name)[source]
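
The ST_* methods back pyDAL’s geographic queries on PostGIS-enabled databases. A sketch assuming PostGIS is installed and that WKT literals are accepted for geometry values (URI is illustrative):

from pydal import DAL, Field

db = DAL('postgres://user:password@localhost/gisdb')
db.define_table('spot', Field('loc', 'geometry()'))
db.spot.insert(loc='POINT(1 2)')

# field.st_within / st_contains / st_distance expand to the ST_* builders above
rows = db(db.spot.loc.st_within('POLYGON((0 0,0 5,5 5,5 0,0 0))')).select()
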

pydal.adapters.sapdb module

class pydal.adapters.sapdb.SAPDBAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None)[source]

Bases: pydal.adapters.base.BaseAdapter

REGEX_URI = <_sre.SRE_Pattern object at 0x14916c0>
create_sequence_and_triggers(query, table, **args)[source]
drivers = ('sapdb',)
lastrowid(table)[source]
select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)[source]
sequence_name(table)[source]
support_distributed_transaction = False
types = {'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'text': 'LONG', 'float': 'FLOAT', 'datetime': 'TIMESTAMP', 'bigint': 'BIGINT', 'id': 'INT PRIMARY KEY', 'json': 'LONG', 'big-id': 'BIGINT PRIMARY KEY', 'blob': 'LONG', 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'string': 'VARCHAR(%(length)s)', 'list:string': 'LONG', 'date': 'DATE', 'integer': 'INT', 'password': 'VARCHAR(%(length)s)', 'list:integer': 'LONG', 'double': 'DOUBLE PRECISION', 'decimal': 'FIXED(%(precision)s,%(scale)s)', 'upload': 'VARCHAR(%(length)s)', 'list:reference': 'LONG', 'boolean': 'CHAR(1)', 'time': 'TIME'}

pydal.adapters.sqlite module

class pydal.adapters.sqlite.JDBCSQLiteAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None)[source]

Bases: pydal.adapters.sqlite.SQLiteAdapter

after_connection()[source]
drivers = ('zxJDBC_sqlite',)
execute(a)[source]
class pydal.adapters.sqlite.SQLiteAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None)[source]

Bases: pydal.adapters.base.BaseAdapter

EXTRACT(field, what)[source]
REGEXP(first, second)[source]
after_connection()[source]
can_select_for_update = None
delete(tablename, query)[source]
drivers = ('sqlite2', 'sqlite3')
select(query, fields, attributes)[source]

Simulate SELECT ... FOR UPDATE with BEGIN IMMEDIATE TRANSACTION. Note that the entire database, rather than one record, is locked (it will be locked eventually anyway by the following UPDATE).

static web2py_extract(lookup, s)[source]
static web2py_regexp(expression, item)[source]
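
Because SQLite locks the whole database file rather than individual rows, a for_update select is simulated with BEGIN IMMEDIATE TRANSACTION, as described above; a sketch:

from pydal import DAL, Field

db = DAL('sqlite://storage.db')
db.define_table('counter', Field('n', 'integer'))

# opens BEGIN IMMEDIATE TRANSACTION: the database stays locked
# until the next commit or rollback
row = db(db.counter.id == 1).select(for_update=True).first()
if row:
    row.update_record(n=row.n + 1)
db.commit()
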
class pydal.adapters.sqlite.SpatiaLiteAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.sqlite.SQLiteAdapter

ST_ASGEOJSON(first, second)[source]
ST_ASTEXT(first)[source]
ST_CONTAINS(first, second)[source]
ST_DISTANCE(first, second)[source]
ST_EQUALS(first, second)[source]
ST_INTERSECTS(first, second)[source]
ST_OVERLAPS(first, second)[source]
ST_SIMPLIFY(first, second)[source]
ST_TOUCHES(first, second)[source]
ST_WITHIN(first, second)[source]
after_connection()[source]
drivers = ('sqlite3', 'sqlite2')
represent(obj, fieldtype)[source]
types = {'string': 'CHAR(%(length)s)', 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s %(null)s %(unique)s', 'text': 'TEXT', 'float': 'DOUBLE', 'datetime': 'TIMESTAMP', 'bigint': 'INTEGER', 'list:string': 'TEXT', 'date': 'DATE', 'integer': 'INTEGER', 'password': 'CHAR(%(length)s)', 'list:integer': 'TEXT', 'id': 'INTEGER PRIMARY KEY AUTOINCREMENT', 'reference FK': ', CONSTRAINT "FK_%(constraint_name)s" FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s', 'geometry': 'GEOMETRY', 'double': 'DOUBLE', 'decimal': 'DOUBLE', 'big-id': 'INTEGER PRIMARY KEY AUTOINCREMENT', 'list:reference': 'TEXT', 'json': 'TEXT', 'boolean': 'CHAR(1)', 'upload': 'CHAR(%(length)s)', 'blob': 'BLOB', 'time': 'TIME', 'big-reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s %(null)s %(unique)s'}

pydal.adapters.teradata module

class pydal.adapters.teradata.TeradataAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None)[source]

Bases: pydal.adapters.base.BaseAdapter

LEFT_JOIN()[source]
close(action='commit', really=True)[source]
drivers = ('pyodbc',)
select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)[source]
types = {'reference': 'INT', 'text': 'VARCHAR(2000)', 'float': 'REAL', 'datetime': 'TIMESTAMP', 'bigint': 'BIGINT', 'reference TFK': ' FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s)', 'id': 'INT GENERATED ALWAYS AS IDENTITY', 'reference FK': ' REFERENCES %(foreign_key)s', 'json': 'VARCHAR(4000)', 'big-id': 'BIGINT GENERATED ALWAYS AS IDENTITY', 'blob': 'BLOB', 'big-reference': 'BIGINT', 'string': 'VARCHAR(%(length)s)', 'list:string': 'VARCHAR(4000)', 'date': 'DATE', 'integer': 'INT', 'password': 'VARCHAR(%(length)s)', 'list:integer': 'VARCHAR(4000)', 'geometry': 'ST_GEOMETRY', 'double': 'DOUBLE', 'decimal': 'NUMERIC(%(precision)s,%(scale)s)', 'upload': 'VARCHAR(%(length)s)', 'list:reference': 'VARCHAR(4000)', 'boolean': 'CHAR(1)', 'time': 'TIME'}

Module contents

pydal.adapters.GoogleDatastoreAdapter = None

make the import available for BaseAdapter