"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)

Thanks to
* Niall Sweeny <niall.sweeny@fonjax.com> for MS SQL support
* Marcel Leuthi <mluethi@mlsystems.ch> for Oracle support
* Denes
* Chris Clark
* clach05
* Denes Lengyel
* and many others who have contributed to current and previous versions

This file contains the DAL support for many relational databases,
including:
- SQLite & SpatiaLite
- MySQL
- Postgres
- Firebird
- Oracle
- MS SQL
- DB2
- Interbase
- Ingres
- Informix (9+ and SE)
- SapDB (experimental)
- Cubrid (experimental)
- CouchDB (experimental)
- MongoDB (in progress)
- Google:nosql
- Google:sql
- Teradata
- IMAP (experimental)

Example of usage:

>>> # from dal import DAL, Field

### create DAL connection (and create DB if it doesn't exist)
>>> db = DAL(('sqlite://storage.sqlite','mysql://a:b@localhost/x'),
...          folder=None)

### define a table 'person' (create/alter as necessary)
>>> person = db.define_table('person',Field('name','string'))

### insert a record
>>> id = person.insert(name='James')

### retrieve it by id
>>> james = person(id)

### retrieve it by name
>>> james = person(name='James')

### retrieve it by arbitrary query
>>> query = (person.name=='James') & (person.name.startswith('J'))
>>> james = db(query).select(person.ALL)[0]

### update one record
>>> james.update_record(name='Jim')
<Row {'id': 1, 'name': 'Jim'}>

### update multiple records by query
>>> db(person.name.like('J%')).update(name='James')
1

### delete records by query
>>> db(person.name.lower() == 'jim').delete()
0

### retrieve multiple records (rows)
>>> people = db(person).select(orderby=person.name,
...                            groupby=person.name, limitby=(0,100))

### further filter them
>>> james = people.find(lambda row: row.name == 'James').first()
>>> print james.id, james.name
1 James

### check aggregates
>>> counter = person.id.count()
>>> print db(person).select(counter).first()(counter)
1

### delete one record
>>> james.delete_record()
1

### delete (drop) entire database table
>>> person.drop()

Supported field types:
id string text boolean integer double decimal password upload
blob time date datetime

Supported DAL URI strings:
'sqlite://test.db'
'spatialite://test.db'
'sqlite:memory'
'spatialite:memory'
'jdbc:sqlite://test.db'
'mysql://root:none@localhost/test'
'postgres://mdipierro:password@localhost/test'
'postgres:psycopg2://mdipierro:password@localhost/test'
'postgres:pg8000://mdipierro:password@localhost/test'
'jdbc:postgres://mdipierro:none@localhost/test'
'mssql://web2py:none@A64X2/web2py_test'
'mssql2://web2py:none@A64X2/web2py_test' # alternate mappings
'oracle://username:password@database'
'firebird://user:password@server:3050/database'
'db2://DSN=dsn;UID=user;PWD=pass'
'firebird://username:password@hostname/database'
'firebird_embedded://username:password@c://path'
'informix://user:password@server:3050/database'
'informixu://user:password@server:3050/database' # unicode informix
'ingres://database'  # or use an ODBC connection string, e.g. 'ingres://dsn=dsn_name'
'google:datastore' # for google app engine datastore
'google:sql' # for google app engine with sql (mysql compatible)
'teradata://DSN=dsn;UID=user;PWD=pass; DATABASE=database' # experimental
'imap://user:password@server:port' # experimental
'mongodb://user:password@server:port/database' # experimental

For more info:
help(DAL)
help(Field)
"""


__all__ = ['DAL', 'Field']

MAXCHARLENGTH = 2**15
DEFAULTLENGTH = {'string':512,
                 'password':512,
                 'upload':512,
                 'text':2**15,
                 'blob':2**31}
TIMINGSSIZE = 100
SPATIALLIBS = {
    'Windows':'libspatialite',
    'Linux':'libspatialite.so',
    'Darwin':'libspatialite.dylib'
    }
DEFAULT_URI = 'sqlite://dummy.db'

import re
import sys
import locale
import os
import types
import datetime
import threading
import time
import csv
import cgi
import copy
import socket
import logging
import base64
import shutil
import marshal
import decimal
import struct
import urllib
import hashlib
import uuid
import glob
import traceback
import platform

PYTHON_VERSION = sys.version_info[0]
if PYTHON_VERSION == 2:
    import cPickle as pickle
    import cStringIO as StringIO
    import copy_reg as copyreg
    hashlib_md5 = hashlib.md5
    bytes, unicode = str, unicode
else:
    import pickle
    from io import StringIO as StringIO
    import copyreg
    long = int
    hashlib_md5 = lambda s: hashlib.md5(bytes(s,'utf8'))
    bytes, unicode = bytes, str

CALLABLETYPES = (types.LambdaType, types.FunctionType,
                 types.BuiltinFunctionType,
                 types.MethodType, types.BuiltinMethodType)

TABLE_ARGS = set(
    ('migrate','primarykey','fake_migrate','format','redefine',
     'singular','plural','trigger_name','sequence_name',
     'common_filter','polymodel','table_class','on_define','actual_name'))

SELECT_ARGS = set(
    ('orderby', 'groupby', 'limitby','required', 'cache', 'left',
     'distinct', 'having', 'join','for_update', 'processor',
     'cacheable', 'orderby_on_limitby'))

ogetattr = object.__getattribute__
osetattr = object.__setattr__
exists = os.path.exists
pjoin = os.path.join


try:
    from utils import web2py_uuid
except (ImportError, SystemError):
    import uuid
    def web2py_uuid(): return str(uuid.uuid4())
218
219 try:
220 import portalocker
221 have_portalocker = True
222 except ImportError:
223 have_portalocker = False
224
225 try:
226 import serializers
227 have_serializers = True
228 except ImportError:
229 have_serializers = False
230 try:
231 import json as simplejson
232 except ImportError:
233 try:
234 import gluon.contrib.simplejson as simplejson
235 except ImportError:
236 simplejson = None
237
238 try:
239 import validators
240 have_validators = True
241 except (ImportError, SyntaxError):
242 have_validators = False
243
244 LOGGER = logging.getLogger("web2py.dal")
245 DEFAULT = lambda:0
246
247 GLOBAL_LOCKER = threading.RLock()
248 THREAD_LOCAL = threading.local()
249
250
251
252
REGEX_TYPE = re.compile('^([\w\_\:]+)')
REGEX_DBNAME = re.compile('^(\w+)(\:\w+)*')
REGEX_W = re.compile('^\w+$')
REGEX_TABLE_DOT_FIELD = re.compile('^(\w+)\.(\w+)$')
REGEX_UPLOAD_PATTERN = re.compile('(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)\.(?P<name>\w+)\.\w+$')
REGEX_CLEANUP_FN = re.compile('[\'"\s;]+')
REGEX_UNPACK = re.compile('(?<!\|)\|(?!\|)')
REGEX_PYTHON_KEYWORDS = re.compile('^(and|del|from|not|while|as|elif|global|or|with|assert|else|if|pass|yield|break|except|import|print|class|exec|in|raise|continue|finally|is|return|def|for|lambda|try)$')
REGEX_SELECT_AS_PARSER = re.compile("\s+AS\s+(\S+)")
REGEX_CONST_STRING = re.compile('(\"[^\"]*?\")|(\'[^\']*?\')')
REGEX_SEARCH_PATTERN = re.compile('^{[^\.]+\.[^\.]+(\.(lt|gt|le|ge|eq|ne|contains|startswith|year|month|day|hour|minute|second))?(\.not)?}$')
REGEX_SQUARE_BRACKETS = re.compile('^.+\[.+\]$')
REGEX_STORE_PATTERN = re.compile('\.(?P<e>\w{1,5})$')
REGEX_QUOTES = re.compile("'[^']*'")
REGEX_ALPHANUMERIC = re.compile('^[0-9a-zA-Z]\w*$')
REGEX_PASSWORD = re.compile('\://([^:@]*)\:')
REGEX_NOPASSWD = re.compile('\/\/[\w\.\-]+[\:\/](.+)(?=@)')

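# A hedged aside (not part of the upstream file): list:* field values are
# stored as '|'-separated strings with any literal '|' escaped as '||', and
# REGEX_UNPACK above splits only on unescaped pipes. A round trip, assuming
# the items ['a', 'b|c']:
#
#   >>> encoded = '|' + '|'.join(x.replace('|', '||') for x in ['a', 'b|c']) + '|'
#   >>> encoded
#   '|a|b||c|'
#   >>> [x.replace('||', '|') for x in REGEX_UNPACK.split(encoded[1:-1])]
#   ['a', 'b|c']
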
DRIVERS = []

try:
    from new import classobj
    from google.appengine.ext import db as gae
    from google.appengine.api import namespace_manager, rdbms
    from google.appengine.api.datastore_types import Key
    from google.appengine.ext.db.polymodel import PolyModel
    DRIVERS.append('google')
except ImportError:
    pass

if not 'google' in DRIVERS:

    try:
        from pysqlite2 import dbapi2 as sqlite2
        DRIVERS.append('SQLite(sqlite2)')
    except ImportError:
        LOGGER.debug('no SQLite drivers pysqlite2.dbapi2')

    try:
        from sqlite3 import dbapi2 as sqlite3
        DRIVERS.append('SQLite(sqlite3)')
    except ImportError:
        LOGGER.debug('no SQLite drivers sqlite3')

    try:
        # try the bundled contrib driver first, then any installed pymysql
        try:
            import contrib.pymysql as pymysql
            # monkeypatch pymysql's escaping tables
            pymysql.ESCAPE_REGEX = re.compile("'")
            pymysql.ESCAPE_MAP = {"'": "''"}
        except ImportError:
            import pymysql
        DRIVERS.append('MySQL(pymysql)')
    except ImportError:
        LOGGER.debug('no MySQL driver pymysql')

    try:
        import MySQLdb
        DRIVERS.append('MySQL(MySQLdb)')
    except ImportError:
        LOGGER.debug('no MySQL driver MySQLDB')

    try:
        import psycopg2
        from psycopg2.extensions import adapt as psycopg2_adapt
        DRIVERS.append('PostgreSQL(psycopg2)')
    except ImportError:
        LOGGER.debug('no PostgreSQL driver psycopg2')

    try:
        # try the bundled contrib driver first, then any installed pg8000
        try:
            import contrib.pg8000.dbapi as pg8000
        except ImportError:
            import pg8000.dbapi as pg8000
        DRIVERS.append('PostgreSQL(pg8000)')
    except ImportError:
        LOGGER.debug('no PostgreSQL driver pg8000')

    try:
        import cx_Oracle
        DRIVERS.append('Oracle(cx_Oracle)')
    except ImportError:
        LOGGER.debug('no Oracle driver cx_Oracle')

    try:
        try:
            import pyodbc
        except ImportError:
            try:
                import contrib.pypyodbc as pyodbc
            except Exception, e:
                raise ImportError(str(e))
        DRIVERS.append('MSSQL(pyodbc)')
        DRIVERS.append('DB2(pyodbc)')
        DRIVERS.append('Teradata(pyodbc)')
        DRIVERS.append('Ingres(pyodbc)')
    except ImportError:
        LOGGER.debug('no MSSQL/DB2/Teradata/Ingres driver pyodbc')

    try:
        import Sybase
        DRIVERS.append('Sybase(Sybase)')
    except ImportError:
        LOGGER.debug('no Sybase driver')

    try:
        import kinterbasdb
        DRIVERS.append('Interbase(kinterbasdb)')
        DRIVERS.append('Firebird(kinterbasdb)')
    except ImportError:
        LOGGER.debug('no Firebird/Interbase driver kinterbasdb')

    try:
        import fdb
        DRIVERS.append('Firebird(fdb)')
    except ImportError:
        LOGGER.debug('no Firebird driver fdb')

    try:
        import firebirdsql
        DRIVERS.append('Firebird(firebirdsql)')
    except ImportError:
        LOGGER.debug('no Firebird driver firebirdsql')

    try:
        import informixdb
        DRIVERS.append('Informix(informixdb)')
        LOGGER.warning('Informix support is experimental')
    except ImportError:
        LOGGER.debug('no Informix driver informixdb')

    try:
        import sapdb
        DRIVERS.append('SQL(sapdb)')
        LOGGER.warning('SAPDB support is experimental')
    except ImportError:
        LOGGER.debug('no SAP driver sapdb')

    try:
        import cubriddb
        DRIVERS.append('Cubrid(cubriddb)')
        LOGGER.warning('Cubrid support is experimental')
    except ImportError:
        LOGGER.debug('no Cubrid driver cubriddb')

    try:
        from com.ziclix.python.sql import zxJDBC
        import java.sql
        # importing the JDBC driver class makes it available to java.sql
        from org.sqlite import JDBC
        zxJDBC_sqlite = java.sql.DriverManager
        DRIVERS.append('PostgreSQL(zxJDBC)')
        DRIVERS.append('SQLite(zxJDBC)')
        LOGGER.warning('zxJDBC support is experimental')
        is_jdbc = True
    except ImportError:
        LOGGER.debug('no SQLite/PostgreSQL driver zxJDBC')
        is_jdbc = False

    try:
        import couchdb
        DRIVERS.append('CouchDB(couchdb)')
    except ImportError:
        LOGGER.debug('no Couchdb driver couchdb')

    try:
        import pymongo
        DRIVERS.append('MongoDB(pymongo)')
    except:
        LOGGER.debug('no MongoDB driver pymongo')

    try:
        import imaplib
        DRIVERS.append('IMAP(imaplib)')
    except:
        LOGGER.debug('no IMAP driver imaplib')

PLURALIZE_RULES = [
    (re.compile('child$'), re.compile('child$'), 'children'),
    (re.compile('oot$'), re.compile('oot$'), 'eet'),
    (re.compile('ooth$'), re.compile('ooth$'), 'eeth'),
    (re.compile('l[eo]af$'), re.compile('l([eo])af$'), 'l\\1aves'),
    (re.compile('sis$'), re.compile('sis$'), 'ses'),
    (re.compile('man$'), re.compile('man$'), 'men'),
    (re.compile('ife$'), re.compile('ife$'), 'ives'),
    (re.compile('eau$'), re.compile('eau$'), 'eaux'),
    (re.compile('lf$'), re.compile('lf$'), 'lves'),
    (re.compile('[sxz]$'), re.compile('$'), 'es'),
    (re.compile('[^aeioudgkprt]h$'), re.compile('$'), 'es'),
    (re.compile('(qu|[^aeiou])y$'), re.compile('y$'), 'ies'),
    (re.compile('$'), re.compile('$'), 's'),
    ]
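
# A minimal sketch of how PLURALIZE_RULES is consumed (the pluralize()
# helper itself is elided from this listing): each rule is a
# (search_regex, sub_regex, replacement) triple and the first matching
# search regex wins.
#
#   >>> def _pluralize(singular, rules=PLURALIZE_RULES):
#   ...     for re_search, re_sub, replace in rules:
#   ...         if re_search.search(singular):
#   ...             return re_sub.sub(replace, singular)
#   >>> _pluralize('leaf')
#   'leaves'
#   >>> _pluralize('person')
#   'persons'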

def IDENTITY(x): return x

def quote_keyword(a, keyword='timestamp'):
    regex = re.compile('\.keyword(?=\w)')
    a = regex.sub('."%s"' % keyword,a)
    return a

if 'google' in DRIVERS:

    is_jdbc = False

    class GAEDecimalProperty(gae.Property):
        """
        GAE decimal implementation
        """
        data_type = decimal.Decimal

        def __init__(self, precision, scale, **kwargs):
            super(GAEDecimalProperty, self).__init__(**kwargs)
            d = '1.'
            for x in range(scale):
                d += '0'
            self.round = decimal.Decimal(d)

        def make_value_from_datastore(self, value):
            if value is None or value == '':
                return None
            else:
                return decimal.Decimal(value).quantize(self.round)

        def validate(self, value):
            value = super(GAEDecimalProperty, self).validate(value)
            if value is None or isinstance(value, decimal.Decimal):
                return value
            elif isinstance(value, basestring):
                return decimal.Decimal(value)
            raise gae.BadValueError("Property %s must be a Decimal or string."\
                                        % self.name)

class ConnectionPool(object):

    POOLS = {}
    check_active_connection = True

    @staticmethod
    def set_folder(folder):
        THREAD_LOCAL.folder = folder

    def close(self, action='commit', really=True):

    @staticmethod
    def close_all_instances(action):
        """ to cleanly close databases in a multithreaded environment """
        dbs = getattr(THREAD_LOCAL,'db_instances',{}).items()
        for db_uid, db_group in dbs:
            for db in db_group:
                if hasattr(db,'_adapter'):
                    db._adapter.close(action)
        getattr(THREAD_LOCAL,'db_instances',{}).clear()
        getattr(THREAD_LOCAL,'db_instances_zombie',{}).clear()
        if callable(action):
            action(None)
        return
    def find_or_make_work_folder(self):
        """ this actually does not make the folder; it has to be there """
        self.folder = getattr(THREAD_LOCAL,'folder','')

        # intentionally disabled: the folder must already exist
        if False and self.folder and not exists(self.folder):
            os.mkdir(self.folder)

    def after_connection_hook(self):
        """hook for the after_connection parameter"""
        if callable(self._after_connection):
            self._after_connection(self)
        self.after_connection()

    def after_connection(self):
        """ this is supposed to be overloaded by adapters"""
        pass

    def reconnect(self, f=None, cursor=True):
        """
        this function defines: self.connection and self.cursor
        (iff cursor is True)
        if self.pool_size>0 it will try to pull the connection from the pool
        if the connection is not active (closed by db server) it will loop
        if not self.pool_size or no active connections in pool makes a new one
        """
        if getattr(self,'connection', None) != None:
            return
        if f is None:
            f = self.connector

        # if the driver is missing, do not attempt to connect
        if not hasattr(self, "driver") or self.driver is None:
            LOGGER.debug("Skipping connection since there's no driver")
            return

        if not self.pool_size:
            self.connection = f()
            self.cursor = cursor and self.connection.cursor()
        else:
            uri = self.uri
            POOLS = ConnectionPool.POOLS
            while True:
                GLOBAL_LOCKER.acquire()
                if not uri in POOLS:
                    POOLS[uri] = []
                if POOLS[uri]:
                    self.connection = POOLS[uri].pop()
                    GLOBAL_LOCKER.release()
                    self.cursor = cursor and self.connection.cursor()
                    try:
                        if self.cursor and self.check_active_connection:
                            self.execute('SELECT 1;')
                        break
                    except:
                        pass
                else:
                    GLOBAL_LOCKER.release()
                    self.connection = f()
                    self.cursor = cursor and self.connection.cursor()
                    break
        self.after_connection_hook()

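# Hedged usage note: pooling is opt-in. A plain DAL('sqlite://storage.sqlite')
# connects directly, while passing pool_size recycles connections through
# ConnectionPool.POOLS, keyed by connection URI, e.g.:
#
#   >>> db = DAL('mysql://a:b@localhost/x', pool_size=10)  # doctest: +SKIP
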

class BaseAdapter(ConnectionPool):

    native_json = False
    driver = None
    driver_name = None
    drivers = ()
    connection = None
    maxcharlength = MAXCHARLENGTH
    commit_on_alter_table = False
    support_distributed_transaction = False
    uploads_in_blob = False
    can_select_for_update = True

    TRUE = 'T'
    FALSE = 'F'
    T_SEP = ' '
    types = {
        'boolean': 'CHAR(1)',
        'string': 'CHAR(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'CHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'CHAR(%(length)s)',
        'integer': 'INTEGER',
        'bigint': 'INTEGER',
        'float': 'DOUBLE',
        'double': 'DOUBLE',
        'decimal': 'DOUBLE',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER PRIMARY KEY AUTOINCREMENT',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',

        'big-id': 'BIGINT PRIMARY KEY AUTOINCREMENT',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }
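
    # Hedged illustration: each entry in `types` is a %-template filled from
    # the field definition, so a 'string' field with the default length 512
    # renders as:
    #
    #   >>> 'CHAR(%(length)s)' % dict(length=512)
    #   'CHAR(512)'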

    def id_query(self, table):
        return table._id != None

    def adapt(self, obj):
        return "'%s'" % obj.replace("'", "''")

    def smart_adapt(self, obj):
        if isinstance(obj,(int,float)):
            return str(obj)
        return self.adapt(str(obj))

    def file_exists(self, filename):
        """
        to be used ONLY for files that, on GAE, may not be on the filesystem
        """
        return exists(filename)

    def file_open(self, filename, mode='rb', lock=True):
        """
        to be used ONLY for files that, on GAE, may not be on the filesystem
        """
        if have_portalocker and lock:
            fileobj = portalocker.LockedFile(filename,mode)
        else:
            fileobj = open(filename,mode)
        return fileobj

    def file_close(self, fileobj):
        """
        to be used ONLY for files that, on GAE, may not be on the filesystem
        """
        if fileobj:
            fileobj.close()

    def find_driver(self, adapter_args, uri=None):
        if getattr(self,'driver',None) != None:
            return
        drivers_available = [driver for driver in self.drivers
                             if driver in globals()]
        if uri:
            items = uri.split('://',1)[0].split(':')
            request_driver = items[1] if len(items)>1 else None
        else:
            request_driver = None
        request_driver = request_driver or adapter_args.get('driver')
        if request_driver:
            if request_driver in drivers_available:
                self.driver_name = request_driver
                self.driver = globals().get(request_driver)
            else:
                raise RuntimeError("driver %s not available" % request_driver)
        elif drivers_available:
            self.driver_name = drivers_available[0]
            self.driver = globals().get(self.driver_name)
        else:
            raise RuntimeError("no driver available %s" % str(self.drivers))

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "None"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        class Dummy(object):
            lastrowid = 1
            def __getattr__(self, value):
                return lambda *a, **b: []
        self.connection = Dummy()
        self.cursor = Dummy()

    def sequence_name(self, tablename):
        return '%s_sequence' % tablename

    def trigger_name(self, tablename):
        return '%s_sequence' % tablename

    def create_table(self, table,
                     migrate=True,
                     fake_migrate=False,
                     polymodel=None):
        db = table._db
        fields = []
        # PostGIS geometry fields are added after the table has been created
        postcreation_fields = []
        sql_fields = {}
        sql_fields_aux = {}
        TFK = {}
        tablename = table._tablename
        sortable = 0
        types = self.types
        for field in table:
            sortable += 1
            field_name = field.name
            field_type = field.type
            if isinstance(field_type,SQLCustomType):
                ftype = field_type.native or field_type.type
            elif field_type.startswith('reference'):
                referenced = field_type[10:].strip()
                if referenced == '.':
                    referenced = tablename
                constraint_name = self.constraint_name(tablename, field_name)
                if not '.' in referenced \
                        and referenced != tablename \
                        and hasattr(table,'_primarykey'):
                    ftype = types['integer']
                else:
                    if hasattr(table,'_primarykey'):
                        rtablename,rfieldname = referenced.split('.')
                        rtable = db[rtablename]
                        rfield = rtable[rfieldname]
                        # must be a primary key reference or unique
                        if rfieldname in rtable._primarykey or \
                                rfield.unique:
                            ftype = types[rfield.type[:9]] % \
                                dict(length=rfield.length)
                            # multicolumn primary key reference?
                            if not rfield.unique and len(rtable._primarykey)>1:
                                # then it has to be a table-level FK
                                if rtablename not in TFK:
                                    TFK[rtablename] = {}
                                TFK[rtablename][rfieldname] = field_name
                            else:
                                ftype = ftype + \
                                    types['reference FK'] % dict(
                                        constraint_name = constraint_name,
                                        foreign_key = '%s (%s)' % (rtablename,
                                                                   rfieldname),
                                        table_name = tablename,
                                        field_name = field_name,
                                        on_delete_action=field.ondelete)
                    else:
                        # make a guess here for circular references
                        if referenced in db:
                            id_fieldname = db[referenced]._id.name
                        elif referenced == tablename:
                            id_fieldname = table._id.name
                        else:
                            id_fieldname = 'id'
                        ftype = types[field_type[:9]] % dict(
                            index_name = field_name+'__idx',
                            field_name = field_name,
                            constraint_name = constraint_name,
                            foreign_key = '%s (%s)' % (referenced,
                                                       id_fieldname),
                            on_delete_action=field.ondelete)
            elif field_type.startswith('list:reference'):
                ftype = types[field_type[:14]]
            elif field_type.startswith('decimal'):
                precision, scale = map(int,field_type[8:-1].split(','))
                ftype = types[field_type[:7]] % \
                    dict(precision=precision,scale=scale)
            elif field_type.startswith('geo'):
                if not hasattr(self,'srid'):
                    raise RuntimeError('Adapter does not support geometry')
                srid = self.srid
                geotype, parms = field_type[:-1].split('(')
                if not geotype in types:
                    raise SyntaxError(
                        'Field: unknown field type: %s for %s' \
                        % (field_type, field_name))
                ftype = types[geotype]
                if self.dbengine == 'postgres' and geotype == 'geometry':
                    # parameters: (schema, srid, dimension)
                    dimension = 2
                    parms = parms.split(',')
                    if len(parms) == 3:
                        schema, srid, dimension = parms
                    elif len(parms) == 2:
                        schema, srid = parms
                    else:
                        schema = parms[0]
                    ftype = "SELECT AddGeometryColumn ('%%(schema)s', '%%(tablename)s', '%%(fieldname)s', %%(srid)s, '%s', %%(dimension)s);" % types[geotype]
                    ftype = ftype % dict(schema=schema,
                                         tablename=tablename,
                                         fieldname=field_name, srid=srid,
                                         dimension=dimension)
                    postcreation_fields.append(ftype)
            elif not field_type in types:
                raise SyntaxError('Field: unknown field type: %s for %s' % \
                                      (field_type, field_name))
            else:
                ftype = types[field_type]\
                    % dict(length=field.length)
            if not field_type.startswith('id') and \
                    not field_type.startswith('reference'):
                if field.notnull:
                    ftype += ' NOT NULL'
                else:
                    ftype += self.ALLOW_NULL()
                if field.unique:
                    ftype += ' UNIQUE'
                if field.custom_qualifier:
                    ftype += ' %s' % field.custom_qualifier

            # add to the list of fields
            sql_fields[field_name] = dict(
                length=field.length,
                unique=field.unique,
                notnull=field.notnull,
                sortable=sortable,
                type=str(field_type),
                sql=ftype)

            if field.notnull and not field.default is None:
                # caveat: sql_fields and sql_fields_aux
                # differ for default values;
                # sql_fields is used to trigger migrations and
                # sql_fields_aux is used to create tables;
                # the reason is that we do not want to trigger
                # a migration simply because a default value changes
                not_null = self.NOT_NULL(field.default, field_type)
                ftype = ftype.replace('NOT NULL', not_null)
            sql_fields_aux[field_name] = dict(sql=ftype)
            # geometry fields are added after the table has been created,
            # not now
            if not (self.dbengine == 'postgres' and \
                        field_type.startswith('geom')):
                fields.append('%s %s' % (field_name, ftype))
        other = ';'

        # backend-specific extensions to fields
        if self.dbengine == 'mysql':
            if not hasattr(table, "_primarykey"):
                fields.append('PRIMARY KEY(%s)' % table._id.name)
            other = ' ENGINE=InnoDB CHARACTER SET utf8;'

        fields = ',\n    '.join(fields)
        for rtablename in TFK:
            rfields = TFK[rtablename]
            pkeys = db[rtablename]._primarykey
            fkeys = [ rfields[k] for k in pkeys ]
            fields = fields + ',\n    ' + \
                types['reference TFK'] % dict(
                    table_name = tablename,
                    field_name=', '.join(fkeys),
                    foreign_table = rtablename,
                    foreign_key = ', '.join(pkeys),
                    on_delete_action = field.ondelete)

        if getattr(table,'_primarykey',None):
            query = "CREATE TABLE %s(\n    %s,\n    %s) %s" % \
                (tablename, fields,
                 self.PRIMARY_KEY(', '.join(table._primarykey)),other)
        else:
            query = "CREATE TABLE %s(\n    %s\n)%s" % \
                (tablename, fields, other)

        if self.uri.startswith('sqlite:///') \
                or self.uri.startswith('spatialite:///'):
            path_encoding = sys.getfilesystemencoding() \
                or locale.getdefaultlocale()[1] or 'utf8'
            dbpath = self.uri[9:self.uri.rfind('/')]\
                .decode('utf8').encode(path_encoding)
        else:
            dbpath = self.folder

        if not migrate:
            return query
        elif self.uri.startswith('sqlite:memory')\
                or self.uri.startswith('spatialite:memory'):
            table._dbt = None
        elif isinstance(migrate, str):
            table._dbt = pjoin(dbpath, migrate)
        else:
            table._dbt = pjoin(
                dbpath, '%s_%s.table' % (table._db._uri_hash, tablename))

        if table._dbt:
            table._loggername = pjoin(dbpath, 'sql.log')
            logfile = self.file_open(table._loggername, 'a')
        else:
            logfile = None
        if not table._dbt or not self.file_exists(table._dbt):
            if table._dbt:
                logfile.write('timestamp: %s\n'
                              % datetime.datetime.today().isoformat())
                logfile.write(query + '\n')
            if not fake_migrate:
                self.create_sequence_and_triggers(query,table)
                table._db.commit()
                # Postgres geometry fields are added now,
                # after the table has been created
                for query in postcreation_fields:
                    self.execute(query)
                    table._db.commit()
            if table._dbt:
                tfile = self.file_open(table._dbt, 'w')
                pickle.dump(sql_fields, tfile)
                self.file_close(tfile)
                if fake_migrate:
                    logfile.write('faked!\n')
                else:
                    logfile.write('success!\n')
        else:
            tfile = self.file_open(table._dbt, 'r')
            try:
                sql_fields_old = pickle.load(tfile)
            except EOFError:
                self.file_close(tfile)
                self.file_close(logfile)
                raise RuntimeError('File %s appears corrupted' % table._dbt)
            self.file_close(tfile)
            if sql_fields != sql_fields_old:
                self.migrate_table(table,
                                   sql_fields, sql_fields_old,
                                   sql_fields_aux, logfile,
                                   fake_migrate=fake_migrate)
        self.file_close(logfile)
        return query
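
    # Hedged example: for the module docstring's person table on SQLite,
    # create_table() returns DDL roughly like (exact whitespace may differ):
    #
    #   CREATE TABLE person(
    #       id INTEGER PRIMARY KEY AUTOINCREMENT,
    #       name CHAR(512)
    #   );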

    def migrate_table(
        self,
        table,
        sql_fields,
        sql_fields_old,
        sql_fields_aux,
        logfile,
        fake_migrate=False,
        ):
        db = table._db
        db._migrated.append(table._tablename)
        tablename = table._tablename
        def fix(item):
            k,v=item
            if not isinstance(v,dict):
                v=dict(type='unknown',sql=v)
            return k.lower(),v
        # make sure all field names are lower case to avoid migrations
        # triggered by a mere change of case
        sql_fields = dict(map(fix,sql_fields.iteritems()))
        sql_fields_old = dict(map(fix,sql_fields_old.iteritems()))
        sql_fields_aux = dict(map(fix,sql_fields_aux.iteritems()))
        if db._debug:
            logging.debug('migrating %s to %s' % (sql_fields_old,sql_fields))

        keys = sql_fields.keys()
        for key in sql_fields_old:
            if not key in keys:
                keys.append(key)
        new_add = self.concat_add(tablename)

        metadata_change = False
        sql_fields_current = copy.copy(sql_fields_old)
        for key in keys:
            query = None
            if not key in sql_fields_old:
                sql_fields_current[key] = sql_fields[key]
                if self.dbengine in ('postgres',) and \
                        sql_fields[key]['type'].startswith('geometry'):
                    # 'sql' == ftype in sql
                    query = [ sql_fields[key]['sql'] ]
                else:
                    query = ['ALTER TABLE %s ADD %s %s;' % \
                                 (tablename, key,
                                  sql_fields_aux[key]['sql'].replace(', ', new_add))]
                metadata_change = True
            elif self.dbengine in ('sqlite', 'spatialite'):
                if key in sql_fields:
                    sql_fields_current[key] = sql_fields[key]
                metadata_change = True
            elif not key in sql_fields:
                del sql_fields_current[key]
                ftype = sql_fields_old[key]['type']
                if self.dbengine in ('postgres',) \
                        and ftype.startswith('geometry'):
                    geotype, parms = ftype[:-1].split('(')
                    schema = parms.split(',')[0]
                    query = [ "SELECT DropGeometryColumn ('%(schema)s', '%(table)s', '%(field)s');" % dict(schema=schema, table=tablename, field=key,) ]
                elif not self.dbengine in ('firebird',):
                    query = ['ALTER TABLE %s DROP COLUMN %s;'
                             % (tablename, key)]
                else:
                    query = ['ALTER TABLE %s DROP %s;' % (tablename, key)]
                metadata_change = True
            elif sql_fields[key]['sql'] != sql_fields_old[key]['sql'] \
                    and not (key in table.fields and
                             isinstance(table[key].type, SQLCustomType)) \
                    and not sql_fields[key]['type'].startswith('reference')\
                    and not sql_fields[key]['type'].startswith('double')\
                    and not sql_fields[key]['type'].startswith('id'):
                sql_fields_current[key] = sql_fields[key]
                t = tablename
                tt = sql_fields_aux[key]['sql'].replace(', ', new_add)
                if not self.dbengine in ('firebird',):
                    query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
                             'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
                             'ALTER TABLE %s DROP COLUMN %s;' % (t, key),
                             'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                             'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
                             'ALTER TABLE %s DROP COLUMN %s__tmp;' % (t, key)]
                else:
                    query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
                             'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
                             'ALTER TABLE %s DROP %s;' % (t, key),
                             'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                             'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
                             'ALTER TABLE %s DROP %s__tmp;' % (t, key)]
                metadata_change = True
            elif sql_fields[key]['type'] != sql_fields_old[key]['type']:
                sql_fields_current[key] = sql_fields[key]
                metadata_change = True

            if query:
                logfile.write('timestamp: %s\n'
                              % datetime.datetime.today().isoformat())
                db['_lastsql'] = '\n'.join(query)
                for sub_query in query:
                    logfile.write(sub_query + '\n')
                    if not fake_migrate:
                        self.execute(sub_query)
                        # some backends do not allow multiple ALTER TABLE
                        # statements in one transaction, so each one must
                        # be committed (and the metadata saved) right away
                        if db._adapter.commit_on_alter_table:
                            db.commit()
                            tfile = self.file_open(table._dbt, 'w')
                            pickle.dump(sql_fields_current, tfile)
                            self.file_close(tfile)
                            logfile.write('success!\n')
                    else:
                        logfile.write('faked!\n')
            elif metadata_change:
                tfile = self.file_open(table._dbt, 'w')
                pickle.dump(sql_fields_current, tfile)
                self.file_close(tfile)

        if metadata_change and \
                not (query and self.dbengine in ('mysql','oracle','firebird')):
            db.commit()
            tfile = self.file_open(table._dbt, 'w')
            pickle.dump(sql_fields_current, tfile)
            self.file_close(tfile)

    def LOWER(self, first):
        return 'LOWER(%s)' % self.expand(first)

    def UPPER(self, first):
        return 'UPPER(%s)' % self.expand(first)

    def COUNT(self, first, distinct=None):
        return ('COUNT(%s)' if not distinct else 'COUNT(DISTINCT %s)') \
            % self.expand(first)

    def EXTRACT(self, first, what):
        return "EXTRACT(%s FROM %s)" % (what, self.expand(first))

    def EPOCH(self, first):
        return self.EXTRACT(first, 'epoch')

    def JOIN(self):
        return 'JOIN'

    def LEFT_JOIN(self):
        return 'LEFT JOIN'

    def RANDOM(self):
        return 'Random()'

    def NOT_NULL(self, default, field_type):
        return 'NOT NULL DEFAULT %s' % self.represent(default,field_type)

    def COALESCE(self, first, second):
        expressions = [self.expand(first)]+[self.expand(e) for e in second]
        return 'COALESCE(%s)' % ','.join(expressions)

    def RAW(self, first):
        return first

    def ALLOW_NULL(self):
        return ''

    def SUBSTRING(self, field, parameters):
        return 'SUBSTR(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])

    def PRIMARY_KEY(self, key):
        return 'PRIMARY KEY(%s)' % key

    def _drop(self, table, mode):
        return ['DROP TABLE %s;' % table]

    def drop(self, table, mode=''):

    def _insert(self, table, fields):
        if fields:
            keys = ','.join(f.name for f, v in fields)
            values = ','.join(self.expand(v, f.type) for f, v in fields)
            return 'INSERT INTO %s(%s) VALUES (%s);' % (table, keys, values)
        else:
            return self._insert_empty(table)

    def _insert_empty(self, table):
        return 'INSERT INTO %s DEFAULT VALUES;' % table
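
    # Hedged illustration: for the docstring's person table, _insert()
    # produces
    #
    #   INSERT INTO person(name) VALUES ('James');
    #
    # and an empty field list falls back to _insert_empty().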

    def insert(self, table, fields):
        query = self._insert(table,fields)
        try:
            self.execute(query)
        except Exception:
            e = sys.exc_info()[1]
            if hasattr(table,'_on_insert_error'):
                return table._on_insert_error(table,fields,e)
            raise e
        if hasattr(table,'_primarykey'):
            return dict([(k[0].name, k[1]) for k in fields \
                             if k[0].name in table._primarykey])
        id = self.lastrowid(table)
        if not isinstance(id,int):
            return id
        rid = Reference(id)
        (rid._table, rid._record) = (table, None)
        return rid

    def bulk_insert(self, table, items):
        return [self.insert(table,item) for item in items]

    def NOT(self, first):
        return '(NOT %s)' % self.expand(first)

    def AND(self, first, second):
        return '(%s AND %s)' % (self.expand(first), self.expand(second))

    def OR(self, first, second):
        return '(%s OR %s)' % (self.expand(first), self.expand(second))

    def BELONGS(self, first, second):
        if isinstance(second, str):
            return '(%s IN (%s))' % (self.expand(first), second[:-1])
        elif not second:
            return '(1=0)'
        items = ','.join(self.expand(item, first.type) for item in second)
        return '(%s IN (%s))' % (self.expand(first), items)
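
    # Hedged illustration: BELONGS renders an IN clause; a list of values
    # expands to
    #
    #   (person.id IN (1,2,3))
    #
    # while a string argument is assumed to be a nested _select(...) whose
    # trailing ';' is stripped before embedding.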

    def REGEXP(self, first, second):
        "regular expression operator"
        raise NotImplementedError

    def LIKE(self, first, second):
        "case-sensitive LIKE operator"
        raise NotImplementedError

    def ILIKE(self, first, second):
        "case-insensitive LIKE operator"
        return '(%s LIKE %s)' % (self.expand(first),
                                 self.expand(second, 'string'))

    def STARTSWITH(self, first, second):
        return '(%s LIKE %s)' % (self.expand(first),
                                 self.expand(second+'%', 'string'))

    def ENDSWITH(self, first, second):
        return '(%s LIKE %s)' % (self.expand(first),
                                 self.expand('%'+second, 'string'))

    def CONTAINS(self, first, second, case_sensitive=False):
        if first.type in ('string','text', 'json'):
            second = Expression(None,self.CONCAT('%',Expression(
                        None,self.REPLACE(second,('%','%%'))),'%'))
        elif first.type.startswith('list:'):
            second = Expression(None,self.CONCAT('%|',Expression(None,self.REPLACE(
                            Expression(None,self.REPLACE(second,('%','%%'))),('|','||'))),'|%'))
        op = case_sensitive and self.LIKE or self.ILIKE
        return op(first,second)
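
    # Hedged illustration: for a list:string field, CONTAINS anchors the
    # needle between the '|' separators used by the bar encoding (with LIKE
    # wildcards escaped), producing a pattern such as
    #
    #   (person.tags LIKE '%|James|%')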

    def EQ(self, first, second=None):
        if second is None:
            return '(%s IS NULL)' % self.expand(first)
        return '(%s = %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def NE(self, first, second=None):
        if second is None:
            return '(%s IS NOT NULL)' % self.expand(first)
        return '(%s <> %s)' % (self.expand(first),
                               self.expand(second, first.type))

    def LT(self, first, second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s < None" % first)
        return '(%s < %s)' % (self.expand(first),
                              self.expand(second,first.type))

    def LE(self, first, second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s <= None" % first)
        return '(%s <= %s)' % (self.expand(first),
                               self.expand(second,first.type))

    def GT(self, first, second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s > None" % first)
        return '(%s > %s)' % (self.expand(first),
                              self.expand(second,first.type))

    def GE(self, first, second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s >= None" % first)
        return '(%s >= %s)' % (self.expand(first),
                               self.expand(second,first.type))

    def is_numerical_type(self, ftype):
        return ftype in ('integer','boolean','double','bigint') or \
            ftype.startswith('decimal')

    def REPLACE(self, first, (second, third)):
        return 'REPLACE(%s,%s,%s)' % (self.expand(first,'string'),
                                      self.expand(second,'string'),
                                      self.expand(third,'string'))

    def CONCAT(self, *items):
        return '(%s)' % ' || '.join(self.expand(x,'string') for x in items)

    def ADD(self, first, second):
        if self.is_numerical_type(first.type):
            return '(%s + %s)' % (self.expand(first),
                                  self.expand(second, first.type))
        else:
            return self.CONCAT(first, second)

    def SUB(self, first, second):
        return '(%s - %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def MUL(self, first, second):
        return '(%s * %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def DIV(self, first, second):
        return '(%s / %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def MOD(self, first, second):
        return '(%s %% %s)' % (self.expand(first),
                               self.expand(second, first.type))

    def AS(self, first, second):
        return '%s AS %s' % (self.expand(first), second)

    def ON(self, first, second):
        if use_common_filters(second):
            second = self.common_filter(second,[first._tablename])
        return '%s ON %s' % (self.expand(first), self.expand(second))

    def COMMA(self, first, second):
        return '%s, %s' % (self.expand(first), self.expand(second))

    def expand(self, expression, field_type=None):
        if isinstance(expression, Field):
            return '%s.%s' % (expression.tablename, expression.name)
        elif isinstance(expression, (Expression, Query)):
            first = expression.first
            second = expression.second
            op = expression.op
            optional_args = expression.optional_args or {}
            if not second is None:
                return op(first, second, **optional_args)
            elif not first is None:
                return op(first,**optional_args)
            elif isinstance(op, str):
                if op.endswith(';'):
                    op=op[:-1]
                return '(%s)' % op
            else:
                return op()
        elif field_type:
            return str(self.represent(expression,field_type))
        elif isinstance(expression,(list,tuple)):
            return ','.join(self.represent(item,field_type) \
                                for item in expression)
        elif isinstance(expression, bool):
            return '1' if expression else '0'
        else:
            return str(expression)
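
    # Hedged walk-through: expand() recurses through Query/Expression trees,
    # so the docstring's (person.name=='James') & (person.name.startswith('J'))
    # renders as
    #
    #   ((person.name = 'James') AND (person.name LIKE 'J%'))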

    def table_alias(self, name):
        return str(name if isinstance(name,Table) else self.db[name])

    def alias(self, table, alias):
        """
        Given a table object, makes a new table object
        with alias name.
        """
        other = copy.copy(table)
        other['_ot'] = other._ot or other._tablename
        other['ALL'] = SQLALL(other)
        other['_tablename'] = alias
        for fieldname in other.fields:
            other[fieldname] = copy.copy(other[fieldname])
            other[fieldname]._tablename = alias
            other[fieldname].tablename = alias
            other[fieldname].table = other
        table._db[alias] = other
        return other

    def _truncate(self, table, mode=''):
        tablename = table._tablename
        return ['TRUNCATE TABLE %s %s;' % (tablename, mode or '')]

    def truncate(self, table, mode=''):
        # fall back to a no-op log when there is no migration logfile
        if table._dbt:
            logfile = self.file_open(table._loggername, 'a')
        else:
            class Logfile(object):
                def write(self, value):
                    pass
                def close(self):
                    pass
            logfile = Logfile()

        try:
            queries = table._db._adapter._truncate(table, mode)
            for query in queries:
                logfile.write(query + '\n')
                self.execute(query)
            table._db.commit()
            logfile.write('success!\n')
        finally:
            logfile.close()

    def _update(self, tablename, query, fields):
        if query:
            if use_common_filters(query):
                query = self.common_filter(query, [tablename])
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        sql_v = ','.join(['%s=%s' % (field.name,
                                     self.expand(value, field.type)) \
                              for (field, value) in fields])
        return 'UPDATE %s SET %s%s;' % (tablename, sql_v, sql_w)

    def update(self, tablename, query, fields):
        sql = self._update(tablename, query, fields)
        try:
            self.execute(sql)
        except Exception:
            e = sys.exc_info()[1]
            table = self.db[tablename]
            if hasattr(table,'_on_update_error'):
                return table._on_update_error(table,query,fields,e)
            raise e
        try:
            return self.cursor.rowcount
        except:
            return None

    def _delete(self, tablename, query):
        if query:
            if use_common_filters(query):
                query = self.common_filter(query, [tablename])
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        return 'DELETE FROM %s%s;' % (tablename, sql_w)

    def delete(self, tablename, query):
        sql = self._delete(tablename, query)
        # sqlite/spatialite do not enforce ON DELETE CASCADE here,
        # so remember the deleted ids and cascade manually below
        db = self.db
        table = db[tablename]
        if self.dbengine in ('sqlite', 'spatialite') and table._referenced_by:
            deleted = [x[table._id.name] for x in db(query).select(table._id)]

        self.execute(sql)
        try:
            counter = self.cursor.rowcount
        except:
            counter = None
        # manual CASCADE for sqlite/spatialite
        if self.dbengine in ('sqlite', 'spatialite') and counter:
            for field in table._referenced_by:
                if field.type=='reference '+table._tablename \
                        and field.ondelete=='CASCADE':
                    db(field.belongs(deleted)).delete()

        return counter

    def get_table(self, query):
        tablenames = self.tables(query)
        if len(tablenames)==1:
            return tablenames[0]
        elif len(tablenames)<1:
            raise RuntimeError("No table selected")
        else:
            raise RuntimeError("Too many tables selected")

    def expand_all(self, fields, tablenames):
        db = self.db
        new_fields = []
        append = new_fields.append
        for item in fields:
            if isinstance(item,SQLALL):
                new_fields += item._table
            elif isinstance(item,str):
                if REGEX_TABLE_DOT_FIELD.match(item):
                    tablename,fieldname = item.split('.')
                    append(db[tablename][fieldname])
                else:
                    append(Expression(db,lambda item=item:item))
            else:
                append(item)
        # ensure there is at least one field
        if not new_fields:
            for table in tablenames:
                for field in db[table]:
                    append(field)
        return new_fields

    def _select(self, query, fields, attributes):
        tables = self.tables
        for key in set(attributes.keys())-SELECT_ARGS:
            raise SyntaxError('invalid select attribute: %s' % key)
        args_get = attributes.get
        tablenames = tables(query)
        tablenames_for_common_filters = tablenames
        for field in fields:
            if isinstance(field, basestring) \
                    and REGEX_TABLE_DOT_FIELD.match(field):
                tn,fn = field.split('.')
                field = self.db[tn][fn]
            for tablename in tables(field):
                if not tablename in tablenames:
                    tablenames.append(tablename)

        if len(tablenames) < 1:
            raise SyntaxError('Set: no tables selected')
        self._colnames = map(self.expand, fields)
        def geoexpand(field):
            if isinstance(field.type,str) and field.type.startswith('geometry'):
                field = field.st_astext()
            return self.expand(field)
        sql_f = ', '.join(map(geoexpand, fields))
        sql_o = ''
        sql_s = ''
        left = args_get('left', False)
        inner_join = args_get('join', False)
        distinct = args_get('distinct', False)
        groupby = args_get('groupby', False)
        orderby = args_get('orderby', False)
        having = args_get('having', False)
        limitby = args_get('limitby', False)
        orderby_on_limitby = args_get('orderby_on_limitby', True)
        for_update = args_get('for_update', False)
        if self.can_select_for_update is False and for_update is True:
            raise SyntaxError('invalid select attribute: for_update')
        if distinct is True:
            sql_s += 'DISTINCT'
        elif distinct:
            sql_s += 'DISTINCT ON (%s)' % distinct
        if inner_join:
            icommand = self.JOIN()
            if not isinstance(inner_join, (tuple, list)):
                inner_join = [inner_join]
            ijoint = [t._tablename for t in inner_join
                      if not isinstance(t,Expression)]
            ijoinon = [t for t in inner_join if isinstance(t, Expression)]
            itables_to_merge={}
            [itables_to_merge.update(
                    dict.fromkeys(tables(t))) for t in ijoinon]
            ijoinont = [t.first._tablename for t in ijoinon]
            [itables_to_merge.pop(t) for t in ijoinont
             if t in itables_to_merge]
            iimportant_tablenames = ijoint + ijoinont + itables_to_merge.keys()
            iexcluded = [t for t in tablenames
                         if not t in iimportant_tablenames]
        if left:
            join = attributes['left']
            command = self.LEFT_JOIN()
            if not isinstance(join, (tuple, list)):
                join = [join]
            joint = [t._tablename for t in join
                     if not isinstance(t, Expression)]
            joinon = [t for t in join if isinstance(t, Expression)]
            # solves a problem with ordering in left joins
            tables_to_merge={}
            [tables_to_merge.update(
                    dict.fromkeys(tables(t))) for t in joinon]
            joinont = [t.first._tablename for t in joinon]
            [tables_to_merge.pop(t) for t in joinont if t in tables_to_merge]
            tablenames_for_common_filters = [t for t in tablenames
                                             if not t in joinont ]
            important_tablenames = joint + joinont + tables_to_merge.keys()
            excluded = [t for t in tablenames
                        if not t in important_tablenames ]
        else:
            excluded = tablenames

        if use_common_filters(query):
            query = self.common_filter(query,tablenames_for_common_filters)
        sql_w = ' WHERE ' + self.expand(query) if query else ''

        if inner_join and not left:
            sql_t = ', '.join([self.table_alias(t) for t in iexcluded + \
                                   itables_to_merge.keys()])
            for t in ijoinon:
                sql_t += ' %s %s' % (icommand, t)
        elif not inner_join and left:
            sql_t = ', '.join([self.table_alias(t) for t in excluded + \
                                   tables_to_merge.keys()])
            if joint:
                sql_t += ' %s %s' % (command,
                                     ','.join([self.table_alias(t) for t in joint]))
            for t in joinon:
                sql_t += ' %s %s' % (command, t)
        elif inner_join and left:
            all_tables_in_query = set(important_tablenames + \
                                          iimportant_tablenames + \
                                          tablenames)
            tables_in_joinon = set(joinont + ijoinont)
            tables_not_in_joinon = \
                all_tables_in_query.difference(tables_in_joinon)
            sql_t = ','.join([self.table_alias(t) for t in tables_not_in_joinon])
            for t in ijoinon:
                sql_t += ' %s %s' % (icommand, t)
            if joint:
                sql_t += ' %s %s' % (command,
                                     ','.join([self.table_alias(t) for t in joint]))
            for t in joinon:
                sql_t += ' %s %s' % (command, t)
        else:
            sql_t = ', '.join(self.table_alias(t) for t in tablenames)
        if groupby:
            if isinstance(groupby, (list, tuple)):
                groupby = xorify(groupby)
            sql_o += ' GROUP BY %s' % self.expand(groupby)
            if having:
                sql_o += ' HAVING %s' % attributes['having']
        if orderby:
            if isinstance(orderby, (list, tuple)):
                orderby = xorify(orderby)
            if str(orderby) == '<random>':
                sql_o += ' ORDER BY %s' % self.RANDOM()
            else:
                sql_o += ' ORDER BY %s' % self.expand(orderby)
        if limitby:
            if orderby_on_limitby and not orderby and tablenames:
                sql_o += ' ORDER BY %s' % ', '.join(
                    ['%s.%s'%(t,x) for t in tablenames for x in (
                            hasattr(self.db[t],'_primarykey') and self.db[t]._primarykey
                            or [self.db[t]._id.name])])
        sql = self.select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)
        if for_update and self.can_select_for_update is True:
            sql = sql.rstrip(';') + ' FOR UPDATE;'
        return sql

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_o += ' LIMIT %i OFFSET %i' % (lmax - lmin, lmin)
        return 'SELECT %s %s FROM %s%s%s;' % \
            (sql_s, sql_f, sql_t, sql_w, sql_o)
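
    # Hedged illustration: limitby=(10, 30) asks for rows 10..29, which this
    # default implementation renders as
    #
    #   SELECT ... FROM ... LIMIT 20 OFFSET 10;
    #
    # (presumably overridden by adapters for engines without LIMIT/OFFSET).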

    def _fetchall(self):
        return self.cursor.fetchall()

    def _select_aux(self, sql, fields, attributes):
        args_get = attributes.get
        cache = args_get('cache',None)
        if not cache:
            self.execute(sql)
            rows = self._fetchall()
        else:
            (cache_model, time_expire) = cache
            key = self.uri + '/' + sql + '/rows'
            if len(key)>200: key = hashlib_md5(key).hexdigest()
            def _select_aux2():
                self.execute(sql)
                return self._fetchall()
            rows = cache_model(key,_select_aux2,time_expire)
        if isinstance(rows,tuple):
            rows = list(rows)
        limitby = args_get('limitby', None) or (0,)
        rows = self.rowslice(rows,limitby[0],None)
        processor = args_get('processor',self.parse)
        cacheable = args_get('cacheable',False)
        return processor(rows,fields,self._colnames,cacheable=cacheable)

    def select(self, query, fields, attributes):
        """
        Always returns a Rows object, possibly empty.
        """
        sql = self._select(query, fields, attributes)
        cache = attributes.get('cache', None)
        if cache and attributes.get('cacheable',False):
            del attributes['cache']
            (cache_model, time_expire) = cache
            key = self.uri + '/' + sql
            if len(key)>200: key = hashlib_md5(key).hexdigest()
            args = (sql,fields,attributes)
            return cache_model(
                key,
                lambda self=self,args=args:self._select_aux(*args),
                time_expire)
        else:
            return self._select_aux(sql,fields,attributes)

    def _count(self, query, distinct=None):
        tablenames = self.tables(query)
        if query:
            if use_common_filters(query):
                query = self.common_filter(query, tablenames)
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        sql_t = ','.join(self.table_alias(t) for t in tablenames)
        if distinct:
            if isinstance(distinct,(list, tuple)):
                distinct = xorify(distinct)
            sql_d = self.expand(distinct)
            return 'SELECT count(DISTINCT %s) FROM %s%s;' % \
                (sql_d, sql_t, sql_w)
        return 'SELECT count(*) FROM %s%s;' % (sql_t, sql_w)

    def count(self, query, distinct=None):
        self.execute(self._count(query, distinct))
        return self.cursor.fetchone()[0]

    def constraint_name(self, table, fieldname):
        return '%s_%s__constraint' % (table,fieldname)

    def create_sequence_and_triggers(self, query, table, **args):
        self.execute(query)

    def log_execute(self, *a, **b):
        if not self.connection: return None
        command = a[0]
        if hasattr(self,'filter_sql_command'):
            command = self.filter_sql_command(command)
        if self.db._debug:
            LOGGER.debug('SQL: %s' % command)
        self.db._lastsql = command
        t0 = time.time()
        ret = self.cursor.execute(command, *a[1:], **b)
        self.db._timings.append((command,time.time()-t0))
        del self.db._timings[:-TIMINGSSIZE]
        return ret

    def execute(self, *a, **b):
        return self.log_execute(*a, **b)

    def represent(self, obj, fieldtype):
        field_is_type = fieldtype.startswith
        if isinstance(obj, CALLABLETYPES):
            obj = obj()
        if isinstance(fieldtype, SQLCustomType):
            value = fieldtype.encoder(obj)
            if fieldtype.type in ('string','text', 'json'):
                return self.adapt(value)
            return value
        if isinstance(obj, (Expression, Field)):
            return str(obj)
        if field_is_type('list:'):
            if not obj:
                obj = []
            elif not isinstance(obj, (list, tuple)):
                obj = [obj]
            if field_is_type('list:string'):
                obj = map(str,obj)
            else:
                obj = map(int,obj)
        # we do not want to bar_encode json objects
        if isinstance(obj, (list, tuple)) and (not fieldtype == "json"):
            obj = bar_encode(obj)
        if obj is None:
            return 'NULL'
        if obj == '' and not fieldtype[:2] in ['st', 'te', 'js', 'pa', 'up']:
            return 'NULL'
        r = self.represent_exceptions(obj, fieldtype)
        if not r is None:
            return r
        if fieldtype == 'boolean':
            if obj and not str(obj)[:1].upper() in '0F':
                return self.smart_adapt(self.TRUE)
            else:
                return self.smart_adapt(self.FALSE)
        if fieldtype == 'id' or fieldtype == 'integer':
            return str(long(obj))
        if field_is_type('decimal'):
            return str(obj)
        elif field_is_type('reference'):
            if fieldtype.find('.')>0:
                return repr(obj)
            elif isinstance(obj, (Row, Reference)):
                return str(obj['id'])
            return str(long(obj))
        elif fieldtype == 'double':
            return repr(float(obj))
        if isinstance(obj, unicode):
            obj = obj.encode(self.db_codec)
        if fieldtype == 'blob':
            obj = base64.b64encode(str(obj))
        elif fieldtype == 'date':
            if isinstance(obj, (datetime.date, datetime.datetime)):
                obj = obj.isoformat()[:10]
            else:
                obj = str(obj)
        elif fieldtype == 'datetime':
            if isinstance(obj, datetime.datetime):
                obj = obj.isoformat(self.T_SEP)[:19]
            elif isinstance(obj, datetime.date):
                obj = obj.isoformat()[:10]+' 00:00:00'
            else:
                obj = str(obj)
        elif fieldtype == 'time':
            if isinstance(obj, datetime.time):
                obj = obj.isoformat()[:8]
            else:
                obj = str(obj)
        elif fieldtype == 'json':
            if not self.native_json:
                if have_serializers:
                    obj = serializers.json(obj)
                elif simplejson:
                    obj = simplejson.dumps(obj)
                else:
                    raise RuntimeError("missing simplejson")
        if not isinstance(obj,bytes):
            obj = bytes(obj)
        try:
            obj.decode(self.db_codec)
        except:
            obj = obj.decode('latin1').encode(self.db_codec)
        return self.adapt(obj)
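
    # Hedged examples of represent() with the default 'T'/'F' booleans and
    # adapt() quoting:
    #
    #   >>> adapter.represent("O'Hara", 'string')   # doctest: +SKIP
    #   "'O''Hara'"
    #   >>> adapter.represent(True, 'boolean')      # doctest: +SKIP
    #   "'T'"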

    def represent_exceptions(self, obj, fieldtype):
        return None

    def lastrowid(self, table):
        return None

    def rowslice(self, rows, minimum=0, maximum=None):
        """
        By default this function does nothing;
        overload when db does not do slicing.
        """
        return rows

    def parse_value(self, value, field_type, blob_decode=True):
        if field_type != 'blob' and isinstance(value, str):
            try:
                value = value.decode(self.db._db_codec)
            except Exception:
                pass
        if isinstance(value, unicode):
            value = value.encode('utf-8')
        if isinstance(field_type, SQLCustomType):
            value = field_type.decoder(value)
        if not isinstance(field_type, str) or value is None:
            return value
        elif field_type in ('string', 'text', 'password', 'upload', 'dict'):
            return value
        elif field_type.startswith('geo'):
            return value
        elif field_type == 'blob' and not blob_decode:
            return value
        else:
            key = REGEX_TYPE.match(field_type).group(0)
            return self.parsemap[key](value,field_type)

    def parse_reference(self, value, field_type):
        referee = field_type[10:].strip()
        if not '.' in referee:
            value = Reference(value)
            value._table, value._record = self.db[referee], None
        return value

    def parse_boolean(self, value, field_type):
        return value == self.TRUE or str(value)[:1].lower() == 't'

    def parse_date(self, value, field_type):
        if isinstance(value, datetime.datetime):
            return value.date()
        if not isinstance(value, (datetime.date,datetime.datetime)):
            (y, m, d) = map(int, str(value)[:10].strip().split('-'))
            value = datetime.date(y, m, d)
        return value

    def parse_time(self, value, field_type):
        if not isinstance(value, datetime.time):
            time_items = map(int,str(value)[:8].strip().split(':')[:3])
            if len(time_items) == 3:
                (h, mi, s) = time_items
            else:
                (h, mi, s) = time_items + [0]
            value = datetime.time(h, mi, s)
        return value

    def parse_datetime(self, value, field_type):
        if not isinstance(value, datetime.datetime):
            value = str(value)
            date_part,time_part,timezone = value[:10],value[11:19],value[19:]
            if '+' in timezone:
                ms,tz = timezone.split('+')
                h,m = tz.split(':')
                dt = datetime.timedelta(seconds=3600*int(h)+60*int(m))
            elif '-' in timezone:
                ms,tz = timezone.split('-')
                h,m = tz.split(':')
                dt = -datetime.timedelta(seconds=3600*int(h)+60*int(m))
            else:
                dt = None
            (y, m, d) = map(int,date_part.split('-'))
            time_parts = time_part and time_part.split(':')[:3] or (0,0,0)
            while len(time_parts)<3: time_parts.append(0)
            time_items = map(int,time_parts)
            (h, mi, s) = time_items
            value = datetime.datetime(y, m, d, h, mi, s)
            if dt:
                value = value + dt
        return value
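
    # Hedged example: note that a trailing UTC offset is *added* to the
    # parsed value by the code above, so
    #
    #   '2013-01-01 12:00:00+02:00'  ->  datetime.datetime(2013, 1, 1, 14, 0)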

    def parse_blob(self, value, field_type):
        return base64.b64decode(str(value))

    def parse_decimal(self, value, field_type):
        decimals = int(field_type[8:-1].split(',')[-1])
        if self.dbengine in ('sqlite', 'spatialite'):
            value = ('%.' + str(decimals) + 'f') % value
        if not isinstance(value, decimal.Decimal):
            value = decimal.Decimal(str(value))
        return value

    def parse_id(self, value, field_type):
        return long(value)

    def parse_integer(self, value, field_type):
        return long(value)

    def parse_double(self, value, field_type):
        return float(value)

    def parse_json(self, value, field_type):
        if not self.native_json:
            if not isinstance(value, basestring):
                raise RuntimeError('json data not a string')
            if isinstance(value, unicode):
                value = value.encode('utf-8')
            if have_serializers:
                value = serializers.loads_json(value)
            elif simplejson:
                value = simplejson.loads(value)
            else:
                raise RuntimeError("missing simplejson")
        return value

    def build_parsemap(self):
        self.parsemap = {
            'id':self.parse_id,
            'integer':self.parse_integer,
            'bigint':self.parse_integer,
            'float':self.parse_double,
            'double':self.parse_double,
            'reference':self.parse_reference,
            'boolean':self.parse_boolean,
            'date':self.parse_date,
            'time':self.parse_time,
            'datetime':self.parse_datetime,
            'blob':self.parse_blob,
            'decimal':self.parse_decimal,
            'json':self.parse_json,
            'list:integer':self.parse_list_integers,
            'list:reference':self.parse_list_references,
            'list:string':self.parse_list_strings,
            }

2026 - def parse(self, rows, fields, colnames, blob_decode=True,
2027 cacheable = False):
2028 self.build_parsemap()
2029 db = self.db
2030 virtualtables = []
2031 new_rows = []
2032 tmps = []
2033 for colname in colnames:
2034 if not REGEX_TABLE_DOT_FIELD.match(colname):
2035 tmps.append(None)
2036 else:
2037 (tablename, fieldname) = colname.split('.')
2038 table = db[tablename]
2039 field = table[fieldname]
2040 ft = field.type
2041 tmps.append((tablename,fieldname,table,field,ft))
2042 for (i,row) in enumerate(rows):
2043 new_row = Row()
2044 for (j,colname) in enumerate(colnames):
2045 value = row[j]
2046 tmp = tmps[j]
2047 if tmp:
2048 (tablename,fieldname,table,field,ft) = tmp
2049 if tablename in new_row:
2050 colset = new_row[tablename]
2051 else:
2052 colset = new_row[tablename] = Row()
2053 if tablename not in virtualtables:
2054 virtualtables.append(tablename)
2055 value = self.parse_value(value,ft,blob_decode)
2056 if field.filter_out:
2057 value = field.filter_out(value)
2058 colset[fieldname] = value
2059
2060
2061 if ft=='id' and fieldname!='id' and \
2062 not 'id' in table.fields:
2063 colset['id'] = value
2064
2065 if ft == 'id' and not cacheable:
2066
2067
2068
2069 if isinstance(self, GoogleDatastoreAdapter):
2070 id = value.key().id_or_name()
2071 colset[fieldname] = id
2072 colset.gae_item = value
2073 else:
2074 id = value
2075 colset.update_record = RecordUpdater(colset,table,id)
2076 colset.delete_record = RecordDeleter(table,id)
2077 for rfield in table._referenced_by:
2078 referee_link = db._referee_name and \
2079 db._referee_name % dict(
2080 table=rfield.tablename,field=rfield.name)
2081 if referee_link and not referee_link in colset:
2082 colset[referee_link] = LazySet(rfield,id)
2083 else:
2084 if not '_extra' in new_row:
2085 new_row['_extra'] = Row()
2086 new_row['_extra'][colname] = \
2087 self.parse_value(value,
2088 fields[j].type,blob_decode)
2089 new_column_name = \
2090 REGEX_SELECT_AS_PARSER.search(colname)
2091 if not new_column_name is None:
2092 column_name = new_column_name.groups(0)
2093 setattr(new_row,column_name[0],value)
2094 new_rows.append(new_row)
2095 rowsobj = Rows(db, new_rows, colnames, rawrows=rows)
2096
2097 for tablename in virtualtables:
2098
2099 table = db[tablename]
2100 fields_virtual = [(f,v) for (f,v) in table.iteritems()
2101 if isinstance(v,FieldVirtual)]
2102 fields_lazy = [(f,v) for (f,v) in table.iteritems()
2103 if isinstance(v,FieldMethod)]
2104 if fields_virtual or fields_lazy:
2105 for row in rowsobj.records:
2106 box = row[tablename]
2107 for f,v in fields_virtual:
2108 box[f] = v.f(row)
2109 for f,v in fields_lazy:
2110 box[f] = (v.handler or VirtualCommand)(v.f,row)
2111
2112
2113 for item in table.virtualfields:
2114 try:
2115 rowsobj = rowsobj.setvirtualfields(**{tablename:item})
2116 except (KeyError, AttributeError):
2117
2118 pass
2119 return rowsobj
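# Illustrative shape of the result (not part of the original source), assuming
# a hypothetical 'person' table:
# >>> rows = db(db.person).select(db.person.ALL)
# each record is a nested Row grouped by table (rows[0]['person']['name']);
# because an 'id' column was selected, each colset also carries
# update_record/delete_record bound to that id, plus a LazySet for every
# referencing table.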
2120
2121 - def common_filter(self, query, tablenames):
2122 tenant_fieldname = self.db._request_tenant
2123
2124 for tablename in tablenames:
2125 table = self.db[tablename]
2126
2127 # apply any user-defined common filter for this table
2128 if table._common_filter != None:
2129 query = query & table._common_filter(query)
2130
2131 # apply the multi-tenant filter, if the table has the tenant field
2132 if tenant_fieldname in table:
2133 default = table[tenant_fieldname].default
2134 if not default is None:
2135 newquery = table[tenant_fieldname] == default
2136 if query is None:
2137 query = newquery
2138 else:
2139 query = query & newquery
2140 return query
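# Usage sketch (illustrative, not in the original source): a filter stored on
# the table is folded into every query that passes through here, e.g.
# >>> db.define_table('doc', Field('owner'))
# >>> db.doc._common_filter = lambda query: db.doc.owner == 'me'
# after which db(db.doc).select() only sees rows whose owner is 'me'.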
2141
2142 - def CASE(self,query,t,f):
2143 def represent(x):
2144 types = {type(True):'boolean',type(0):'integer',type(1.0):'double'}
2145 if x is None: return 'NULL'
2146 elif isinstance(x,Expression): return str(x)
2147 else: return self.represent(x,types.get(type(x),'string'))
2148 return Expression(self.db,'CASE WHEN %s THEN %s ELSE %s END' % \
2149 (self.expand(query),represent(t),represent(f)))
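# Usage sketch (illustrative): Query.case routes here and yields an Expression
# wrapping a SQL CASE, e.g. for a hypothetical 'person' table:
# >>> expr = (db.person.name == 'James').case('J', 'other')
# >>> db(db.person).select(expr)
# which renders as SELECT CASE WHEN person.name='James'
#                         THEN 'J' ELSE 'other' END FROM person;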
2150
2155 -class SQLiteAdapter(BaseAdapter):
2156 drivers = ('sqlite2','sqlite3')
2157
2158 can_select_for_update = None
2159
2161 return "web2py_extract('%s',%s)" % (what, self.expand(field))
2162
2163 @staticmethod
2164 - def web2py_extract(lookup, s):
2165 table = {
2166 'year': (0, 4),
2167 'month': (5, 7),
2168 'day': (8, 10),
2169 'hour': (11, 13),
2170 'minute': (14, 16),
2171 'second': (17, 19),
2172 }
2173 try:
2174 if lookup != 'epoch':
2175 (i, j) = table[lookup]
2176 return int(s[i:j])
2177 else:
2178 return time.mktime(datetime.datetime.strptime(s, '%Y-%m-%d %H:%M:%S').timetuple())
2179 except:
2180 return None
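# Illustrative doctest (not part of the original source): the helper is
# registered as a SQLite user-defined function (see after_connection below)
# and works by plain string slicing of the stored timestamp:
# >>> SQLiteAdapter.web2py_extract('month', '2013-04-01 12:00:00')
# 4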
2181
2182 @staticmethod
2183 - def web2py_regexp(expression, item):
2184 return re.compile(expression).search(item) is not None
2185
2186 - def __init__(self, db, uri, pool_size=0, folder=None, db_codec ='UTF-8',
2187 credential_decoder=IDENTITY, driver_args={},
2188 adapter_args={}, do_connect=True, after_connection=None):
2189 self.db = db
2190 self.dbengine = "sqlite"
2191 self.uri = uri
2192 if do_connect: self.find_driver(adapter_args)
2193 self.pool_size = 0
2194 self.folder = folder
2195 self.db_codec = db_codec
2196 self._after_connection = after_connection
2197 self.find_or_make_work_folder()
2198 path_encoding = sys.getfilesystemencoding() \
2199 or locale.getdefaultlocale()[1] or 'utf8'
2200 if uri.startswith('sqlite:memory'):
2201 dbpath = ':memory:'
2202 else:
2203 dbpath = uri.split('://',1)[1]
2204 if dbpath[0] != '/':
2205 if PYTHON_VERSION == 2:
2206 dbpath = pjoin(
2207 self.folder.decode(path_encoding).encode('utf8'), dbpath)
2208 else:
2209 dbpath = pjoin(self.folder, dbpath)
2210 if not 'check_same_thread' in driver_args:
2211 driver_args['check_same_thread'] = False
2212 if not 'detect_types' in driver_args and do_connect:
2213 driver_args['detect_types'] = self.driver.PARSE_DECLTYPES
2214 def connector(dbpath=dbpath, driver_args=driver_args):
2215 return self.driver.Connection(dbpath, **driver_args)
2216 self.connector = connector
2217 if do_connect: self.reconnect()
2218
2219 - def after_connection(self):
2220 self.connection.create_function('web2py_extract', 2,
2221 SQLiteAdapter.web2py_extract)
2222 self.connection.create_function("REGEXP", 2,
2223 SQLiteAdapter.web2py_regexp)
2224
2225 - def _truncate(self, table, mode=''):
2226 tablename = table._tablename
2227 return ['DELETE FROM %s;' % tablename,
2228 "DELETE FROM sqlite_sequence WHERE name='%s';" % tablename]
2229
2230 - def lastrowid(self, table):
2231 return self.cursor.lastrowid
2232
2233 - def REGEXP(self,first,second):
2234 return '(%s REGEXP %s)' % (self.expand(first),
2235 self.expand(second,'string'))
2236
2237 - def select(self, query, fields, attributes):
2238 """
2239 Simulate SELECT ... FOR UPDATE with BEGIN IMMEDIATE TRANSACTION.
2240 Note that the entire database, rather than one record, is locked
2241 (it will be locked eventually anyway by the following UPDATE).
2242 """
2243 if attributes.get('for_update', False) and not 'cache' in attributes:
2244 self.execute('BEGIN IMMEDIATE TRANSACTION;')
2245 return super(SQLiteAdapter, self).select(query, fields, attributes)
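# Usage sketch (illustrative, not in the original source):
# >>> rows = db(db.person.id > 0).select(db.person.ALL, for_update=True)
# acquires the immediate transaction lock before the read.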
2246
2247 -class SpatiaLiteAdapter(SQLiteAdapter):
2248 drivers = ('sqlite3','sqlite2')
2249
2250 types = copy.copy(BaseAdapter.types)
2251 types.update(geometry='GEOMETRY')
2252
2253 - def __init__(self, db, uri, pool_size=0, folder=None, db_codec ='UTF-8',
2254 credential_decoder=IDENTITY, driver_args={},
2255 adapter_args={}, do_connect=True, srid=4326, after_connection=None):
2256 self.db = db
2257 self.dbengine = "spatialite"
2258 self.uri = uri
2259 if do_connect: self.find_driver(adapter_args)
2260 self.pool_size = 0
2261 self.folder = folder
2262 self.db_codec = db_codec
2263 self._after_connection = after_connection
2264 self.find_or_make_work_folder()
2265 self.srid = srid
2266 path_encoding = sys.getfilesystemencoding() \
2267 or locale.getdefaultlocale()[1] or 'utf8'
2268 if uri.startswith('spatialite:memory'):
2269 dbpath = ':memory:'
2270 else:
2271 dbpath = uri.split('://',1)[1]
2272 if dbpath[0] != '/':
2273 dbpath = pjoin(
2274 self.folder.decode(path_encoding).encode('utf8'), dbpath)
2275 if not 'check_same_thread' in driver_args:
2276 driver_args['check_same_thread'] = False
2277 if not 'detect_types' in driver_args and do_connect:
2278 driver_args['detect_types'] = self.driver.PARSE_DECLTYPES
2279 def connector(dbpath=dbpath, driver_args=driver_args):
2280 return self.driver.Connection(dbpath, **driver_args)
2281 self.connector = connector
2282 if do_connect: self.reconnect()
2283
2296
2297
2298
2299 - def ST_ASGEOJSON(self, first, second):
2300 return 'AsGeoJSON(%s,%s,%s)' %(self.expand(first),
2301 second['precision'], second['options'])
2302
2303 - def ST_ASTEXT(self, first):
2304 return 'AsText(%s)' %(self.expand(first))
2305
2306 - def ST_CONTAINS(self, first, second):
2307 return 'Contains(%s,%s)' %(self.expand(first),
2308 self.expand(second, first.type))
2309
2310 - def ST_DISTANCE(self, first, second):
2311 return 'Distance(%s,%s)' %(self.expand(first),
2312 self.expand(second, first.type))
2313
2314 - def ST_EQUALS(self, first, second):
2315 return 'Equals(%s,%s)' %(self.expand(first),
2316 self.expand(second, first.type))
2317
2318 - def ST_INTERSECTS(self, first, second):
2319 return 'Intersects(%s,%s)' %(self.expand(first),
2320 self.expand(second, first.type))
2321
2322 - def ST_OVERLAPS(self, first, second):
2323 return 'Overlaps(%s,%s)' %(self.expand(first),
2324 self.expand(second, first.type))
2325
2326 - def ST_SIMPLIFY(self, first, second):
2327 return 'Simplify(%s,%s)' %(self.expand(first),
2328 self.expand(second, 'double'))
2329
2330 - def ST_TOUCHES(self, first, second):
2331 return 'Touches(%s,%s)' %(self.expand(first),
2332 self.expand(second, first.type))
2333
2334 - def ST_WITHIN(self, first, second):
2335 return 'Within(%s,%s)' %(self.expand(first),
2336 self.expand(second, first.type))
2337
2338 - def represent(self, obj, fieldtype):
2339 field_is_type = fieldtype.startswith
2340 if field_is_type('geo'):
2341 srid = 4326
2342 geotype, parms = fieldtype[:-1].split('(')
2343 parms = parms.split(',')
2344 if len(parms) >= 2:
2345 schema, srid = parms[:2]
2346
2347 value = "ST_GeomFromText('%s',%s)" %(obj, srid)
2348
2349
2350
2351
2352 return value
2353 return BaseAdapter.represent(self, obj, fieldtype)
2354
2356 -class JDBCSQLiteAdapter(SQLiteAdapter):
2357 drivers = ('zxJDBC_sqlite',)
2358
2359 - def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
2360 credential_decoder=IDENTITY, driver_args={},
2361 adapter_args={}, do_connect=True, after_connection=None):
2362 self.db = db
2363 self.dbengine = "sqlite"
2364 self.uri = uri
2365 if do_connect: self.find_driver(adapter_args)
2366 self.pool_size = pool_size
2367 self.folder = folder
2368 self.db_codec = db_codec
2369 self._after_connection = after_connection
2370 self.find_or_make_work_folder()
2371 path_encoding = sys.getfilesystemencoding() \
2372 or locale.getdefaultlocale()[1] or 'utf8'
2373 if uri.startswith('sqlite:memory'):
2374 dbpath = ':memory:'
2375 else:
2376 dbpath = uri.split('://',1)[1]
2377 if dbpath[0] != '/':
2378 dbpath = pjoin(
2379 self.folder.decode(path_encoding).encode('utf8'), dbpath)
2380 def connector(dbpath=dbpath,driver_args=driver_args):
2381 return self.driver.connect(
2382 self.driver.getConnection('jdbc:sqlite:'+dbpath),
2383 **driver_args)
2384 self.connector = connector
2385 if do_connect: self.reconnect()
2386
2391
2394
2395
2396 -class MySQLAdapter(BaseAdapter):
2397 drivers = ('MySQLdb','pymysql')
2398
2399 maxcharlength = 255
2400 commit_on_alter_table = True
2401 support_distributed_transaction = True
2402 types = {
2403 'boolean': 'CHAR(1)',
2404 'string': 'VARCHAR(%(length)s)',
2405 'text': 'LONGTEXT',
2406 'json': 'LONGTEXT',
2407 'password': 'VARCHAR(%(length)s)',
2408 'blob': 'LONGBLOB',
2409 'upload': 'VARCHAR(%(length)s)',
2410 'integer': 'INT',
2411 'bigint': 'BIGINT',
2412 'float': 'FLOAT',
2413 'double': 'DOUBLE',
2414 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
2415 'date': 'DATE',
2416 'time': 'TIME',
2417 'datetime': 'DATETIME',
2418 'id': 'INT AUTO_INCREMENT NOT NULL',
2419 'reference': 'INT, INDEX %(index_name)s (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2420 'list:integer': 'LONGTEXT',
2421 'list:string': 'LONGTEXT',
2422 'list:reference': 'LONGTEXT',
2423 'big-id': 'BIGINT AUTO_INCREMENT NOT NULL',
2424 'big-reference': 'BIGINT, INDEX %(index_name)s (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2425 }
2426
2429
2432
2433 - def SUBSTRING(self,field,parameters):
2434 return 'SUBSTRING(%s,%s,%s)' % (self.expand(field),
2435 parameters[0], parameters[1])
2436
2437 - def EPOCH(self, first):
2438 return "UNIX_TIMESTAMP(%s)" % self.expand(first)
2439
2440 - def CONCAT(self, *items):
2441 return 'CONCAT(%s)' % ','.join(self.expand(x,'string') for x in items)
2442
2443 - def REGEXP(self,first,second):
2444 return '(%s REGEXP %s)' % (self.expand(first),
2445 self.expand(second,'string'))
2446
2447 - def _drop(self,table,mode):
2448
2449 return ['SET FOREIGN_KEY_CHECKS=0;','DROP TABLE %s;' % table,
2450 'SET FOREIGN_KEY_CHECKS=1;']
2451
2453 return 'INSERT INTO %s VALUES (DEFAULT);' % table
2454
2457
2461
2464
2467
2468 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$')
2469
2470 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2471 credential_decoder=IDENTITY, driver_args={},
2472 adapter_args={}, do_connect=True, after_connection=None):
2473 self.db = db
2474 self.dbengine = "mysql"
2475 self.uri = uri
2476 if do_connect: self.find_driver(adapter_args,uri)
2477 self.pool_size = pool_size
2478 self.folder = folder
2479 self.db_codec = db_codec
2480 self._after_connection = after_connection
2481 self.find_or_make_work_folder()
2482 ruri = uri.split('://',1)[1]
2483 m = self.REGEX_URI.match(ruri)
2484 if not m:
2485 raise SyntaxError(
2486 "Invalid URI string in DAL: %s" % self.uri)
2487 user = credential_decoder(m.group('user'))
2488 if not user:
2489 raise SyntaxError('User required')
2490 password = credential_decoder(m.group('password'))
2491 if not password:
2492 password = ''
2493 host = m.group('host')
2494 if not host:
2495 raise SyntaxError('Host name required')
2496 db = m.group('db')
2497 if not db:
2498 raise SyntaxError('Database name required')
2499 port = int(m.group('port') or '3306')
2500 charset = m.group('charset') or 'utf8'
2501 driver_args.update(db=db,
2502 user=credential_decoder(user),
2503 passwd=credential_decoder(password),
2504 host=host,
2505 port=port,
2506 charset=charset)
2507
2508
2509 def connector(driver_args=driver_args):
2510 return self.driver.connect(**driver_args)
2511 self.connector = connector
2512 if do_connect: self.reconnect()
2513
2514 - def after_connection(self):
2515 self.execute('SET FOREIGN_KEY_CHECKS=1;')
2516 self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
2517
2518 - def lastrowid(self,table):
2519 self.execute('select last_insert_id();')
2520 return int(self.cursor.fetchone()[0])
2521
2522
2523 -class PostgreSQLAdapter(BaseAdapter):
2524 drivers = ('psycopg2','pg8000')
2525
2526 support_distributed_transaction = True
2527 types = {
2528 'boolean': 'CHAR(1)',
2529 'string': 'VARCHAR(%(length)s)',
2530 'text': 'TEXT',
2531 'json': 'TEXT',
2532 'password': 'VARCHAR(%(length)s)',
2533 'blob': 'BYTEA',
2534 'upload': 'VARCHAR(%(length)s)',
2535 'integer': 'INTEGER',
2536 'bigint': 'BIGINT',
2537 'float': 'FLOAT',
2538 'double': 'FLOAT8',
2539 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
2540 'date': 'DATE',
2541 'time': 'TIME',
2542 'datetime': 'TIMESTAMP',
2543 'id': 'SERIAL PRIMARY KEY',
2544 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2545 'list:integer': 'TEXT',
2546 'list:string': 'TEXT',
2547 'list:reference': 'TEXT',
2548 'geometry': 'GEOMETRY',
2549 'geography': 'GEOGRAPHY',
2550 'big-id': 'BIGSERIAL PRIMARY KEY',
2551 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2552 }
2553
2554 - def varquote(self,name):
2555 return varquote_aux(name,'"%s"')
2556
2557 - def adapt(self,obj):
2558 if self.driver_name == 'psycopg2':
2559 return psycopg2_adapt(obj).getquoted()
2560 elif self.driver_name == 'pg8000':
2561 return "'%s'" % str(obj).replace("%","%%").replace("'","''")
2562 else:
2563 return "'%s'" % str(obj).replace("'","''")
2564
2565 - def sequence_name(self,table):
2566 return '%s_id_Seq' % table
2567
2568 - def RANDOM(self):
2569 return 'RANDOM()'
2570
2571 - def ADD(self, first, second):
2572 t = first.type
2573 if t in ('text','string','password', 'json', 'upload','blob'):
2574 return '(%s || %s)' % (self.expand(first), self.expand(second, t))
2575 else:
2576 return '(%s + %s)' % (self.expand(first), self.expand(second, t))
2577
2578 - def distributed_transaction_begin(self,key):
2579 return
2580
2581 - def prepare(self,key):
2582 self.execute("PREPARE TRANSACTION '%s';" % key)
2583
2584 - def commit_prepared(self,key):
2585 self.execute("COMMIT PREPARED '%s';" % key)
2586
2587 - def rollback_prepared(self,key):
2588 self.execute("ROLLBACK PREPARED '%s';" % key)
2589
2590 - def create_sequence_and_triggers(self, query, table, **args):
2591
2592 # PostgreSQL needs no explicit sequence or trigger here:
2593 # the SERIAL/BIGSERIAL id column creates and manages its
2594 # own sequence implicitly
2595 self.execute(query)
2596
2597 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$')
2598
2599 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2600 credential_decoder=IDENTITY, driver_args={},
2601 adapter_args={}, do_connect=True, srid=4326,
2602 after_connection=None):
2603 self.db = db
2604 self.dbengine = "postgres"
2605 self.uri = uri
2606 if do_connect: self.find_driver(adapter_args,uri)
2607 self.pool_size = pool_size
2608 self.folder = folder
2609 self.db_codec = db_codec
2610 self._after_connection = after_connection
2611 self.srid = srid
2612 self.find_or_make_work_folder()
2613 ruri = uri.split('://',1)[1]
2614 m = self.REGEX_URI.match(ruri)
2615 if not m:
2616 raise SyntaxError("Invalid URI string in DAL")
2617 user = credential_decoder(m.group('user'))
2618 if not user:
2619 raise SyntaxError('User required')
2620 password = credential_decoder(m.group('password'))
2621 if not password:
2622 password = ''
2623 host = m.group('host')
2624 if not host:
2625 raise SyntaxError('Host name required')
2626 db = m.group('db')
2627 if not db:
2628 raise SyntaxError('Database name required')
2629 port = m.group('port') or '5432'
2630 sslmode = m.group('sslmode')
2631 if sslmode:
2632 msg = ("dbname='%s' user='%s' host='%s' "
2633 "port=%s password='%s' sslmode='%s'") \
2634 % (db, user, host, port, password, sslmode)
2635 else:
2636 msg = ("dbname='%s' user='%s' host='%s' "
2637 "port=%s password='%s'") \
2638 % (db, user, host, port, password)
2639
2640 if self.driver:
2641 self.__version__ = "%s %s" % (self.driver.__name__,
2642 self.driver.__version__)
2643 else:
2644 self.__version__ = None
2645 def connector(msg=msg,driver_args=driver_args):
2646 return self.driver.connect(msg,**driver_args)
2647 self.connector = connector
2648 if do_connect: self.reconnect()
2649
2650 - def after_connection(self):
2651 self.connection.set_client_encoding('UTF8')
2652 self.execute("SET standard_conforming_strings=on;")
2653 self.try_json()
2654
2655 - def lastrowid(self,table):
2656 self.execute("select currval('%s')" % table._sequence_name)
2657 return int(self.cursor.fetchone()[0])
2658
2659 - def try_json(self):
2660 # detect whether the server and driver support the native JSON
2661 # type (PostgreSQL >= 9.2); otherwise 'json' stays mapped to TEXT
2662 if self.driver_name == "pg8000":
2663 supports_json = self.connection.server_version >= "9.2.0"
2664 elif (self.driver_name == "psycopg2") and \
2665 (self.driver.__version__ >= "2.0.12"):
2666 supports_json = self.connection.server_version >= 90200
2667 elif self.driver_name == "zxJDBC":
2668 supports_json = self.connection.dbversion >= "9.2.0"
2669 else: supports_json = None
2670 if supports_json:
2671 self.types["json"] = "JSON"
2672 self.native_json = True
2673 else: LOGGER.debug("Your database version does not support the JSON data type (using TEXT instead)")
2674
2675 - def LIKE(self,first,second):
2676 args = (self.expand(first), self.expand(second,'string'))
2677 if not first.type in ('string', 'text', 'json'):
2678 return '(CAST(%s AS CHAR(%s)) LIKE %s)' % (args[0], first.length, args[1])
2679 else:
2680 return '(%s LIKE %s)' % args
2681
2682 - def ILIKE(self,first,second):
2683 args = (self.expand(first), self.expand(second,'string'))
2684 if not first.type in ('string', 'text', 'json'):
2685 return '(CAST(%s AS CHAR(%s)) LIKE %s)' % (args[0], first.length, args[1])
2686 else:
2687 return '(%s ILIKE %s)' % args
2688
2689 - def REGEXP(self,first,second):
2690 return '(%s ~ %s)' % (self.expand(first),
2691 self.expand(second,'string'))
2692
2693 - def STARTSWITH(self,first,second):
2694 return '(%s ILIKE %s)' % (self.expand(first),
2695 self.expand(second+'%','string'))
2696
2697 - def ENDSWITH(self,first,second):
2698 return '(%s ILIKE %s)' % (self.expand(first),
2699 self.expand('%'+second,'string'))
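# Illustrative translation (not part of the original source):
# >>> db(db.person.name.startswith('J'))._select(db.person.id)
# renders the condition through STARTSWITH above as
# (person.name ILIKE 'J%'), i.e. case-insensitive on PostgreSQL.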
2700
2701
2702
2703 - def ST_ASGEOJSON(self, first, second):
2704 """
2705 http://postgis.org/docs/ST_AsGeoJSON.html
2706 """
2707 return 'ST_AsGeoJSON(%s,%s,%s,%s)' %(second['version'],
2708 self.expand(first), second['precision'], second['options'])
2709
2710 - def ST_ASTEXT(self, first):
2711 """
2712 http://postgis.org/docs/ST_AsText.html
2713 """
2714 return 'ST_AsText(%s)' %(self.expand(first))
2715
2716 - def ST_X(self, first):
2717 """
2718 http://postgis.org/docs/ST_X.html
2719 """
2720 return 'ST_X(%s)' %(self.expand(first))
2721
2722 - def ST_Y(self, first):
2723 """
2724 http://postgis.org/docs/ST_Y.html
2725 """
2726 return 'ST_Y(%s)' %(self.expand(first))
2727
2728 - def ST_CONTAINS(self, first, second):
2729 """
2730 http://postgis.org/docs/ST_Contains.html
2731 """
2732 return 'ST_Contains(%s,%s)' %(self.expand(first), self.expand(second, first.type))
2733
2734 - def ST_DISTANCE(self, first, second):
2735 """
2736 http://postgis.org/docs/ST_Distance.html
2737 """
2738 return 'ST_Distance(%s,%s)' %(self.expand(first), self.expand(second, first.type))
2739
2740 - def ST_EQUALS(self, first, second):
2741 """
2742 http://postgis.org/docs/ST_Equals.html
2743 """
2744 return 'ST_Equals(%s,%s)' %(self.expand(first), self.expand(second, first.type))
2745
2746 - def ST_INTERSECTS(self, first, second):
2747 """
2748 http://postgis.org/docs/ST_Intersects.html
2749 """
2750 return 'ST_Intersects(%s,%s)' %(self.expand(first), self.expand(second, first.type))
2751
2752 - def ST_OVERLAPS(self, first, second):
2753 """
2754 http://postgis.org/docs/ST_Overlaps.html
2755 """
2756 return 'ST_Overlaps(%s,%s)' %(self.expand(first), self.expand(second, first.type))
2757
2758 - def ST_SIMPLIFY(self, first, second):
2759 """
2760 http://postgis.org/docs/ST_Simplify.html
2761 """
2762 return 'ST_Simplify(%s,%s)' %(self.expand(first), self.expand(second, 'double'))
2763
2764 - def ST_TOUCHES(self, first, second):
2765 """
2766 http://postgis.org/docs/ST_Touches.html
2767 """
2768 return 'ST_Touches(%s,%s)' %(self.expand(first), self.expand(second, first.type))
2769
2770 - def ST_WITHIN(self, first, second):
2771 """
2772 http://postgis.org/docs/ST_Within.html
2773 """
2774 return 'ST_Within(%s,%s)' %(self.expand(first), self.expand(second, first.type))
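# Usage sketch (illustrative, assuming a hypothetical table 'place' with a
# 'geometry()' field named shape):
# >>> db(db.place.shape.st_contains("POINT(-95.4 29.7)")).select()
# expands through ST_CONTAINS above into
# ST_Contains(place.shape, ST_GeomFromText('POINT(-95.4 29.7)',4326))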
2775
2776 - def represent(self, obj, fieldtype):
2777 field_is_type = fieldtype.startswith
2778 if field_is_type('geo'):
2779 srid = 4326
2780 geotype, parms = fieldtype[:-1].split('(')
2781 parms = parms.split(',')
2782 if len(parms) >= 2:
2783 schema, srid = parms[:2]
2784 if field_is_type('geometry'):
2785 value = "ST_GeomFromText('%s',%s)" %(obj, srid)
2786 elif field_is_type('geography'):
2787 value = "ST_GeogFromText('SRID=%s;%s')" %(srid, obj)
2788
2789
2790 return value
2791 return BaseAdapter.represent(self, obj, fieldtype)
2792
2793 -class NewPostgreSQLAdapter(PostgreSQLAdapter):
2794 drivers = ('psycopg2','pg8000')
2795
2796 types = {
2797 'boolean': 'CHAR(1)',
2798 'string': 'VARCHAR(%(length)s)',
2799 'text': 'TEXT',
2800 'json': 'TEXT',
2801 'password': 'VARCHAR(%(length)s)',
2802 'blob': 'BYTEA',
2803 'upload': 'VARCHAR(%(length)s)',
2804 'integer': 'INTEGER',
2805 'bigint': 'BIGINT',
2806 'float': 'FLOAT',
2807 'double': 'FLOAT8',
2808 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
2809 'date': 'DATE',
2810 'time': 'TIME',
2811 'datetime': 'TIMESTAMP',
2812 'id': 'SERIAL PRIMARY KEY',
2813 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2814 'list:integer': 'BIGINT[]',
2815 'list:string': 'TEXT[]',
2816 'list:reference': 'BIGINT[]',
2817 'geometry': 'GEOMETRY',
2818 'geography': 'GEOGRAPHY',
2819 'big-id': 'BIGSERIAL PRIMARY KEY',
2820 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2821 }
2822
2823 - def parse_list_integers(self, value, field_type):
2824 return value
2825
2826 - def parse_list_references(self, value, field_type):
2827 return [self.parse_reference(r, field_type[5:]) for r in value]
2828
2829 - def parse_list_strings(self, value, field_type):
2830 return value
2831
2832 - def represent(self, obj, fieldtype):
2833 field_is_type = fieldtype.startswith
2834 if field_is_type('list:'):
2835 if not obj:
2836 obj = []
2837 elif not isinstance(obj, (list, tuple)):
2838 obj = [obj]
2839 if field_is_type('list:string'):
2840 obj = map(str,obj)
2841 else:
2842 obj = map(int,obj)
2843 return 'ARRAY[%s]' % ','.join(repr(item) for item in obj)
2844 return BaseAdapter.represent(self, obj, fieldtype)
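# Illustrative example (not part of the original source): with native arrays
# a list:integer value [1, 2, 3] is rendered as ARRAY[1,2,3] rather than the
# '|1|2|3|' string encoding the base adapter stores in TEXT columns.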
2845
2846
2847 -class JDBCPostgreSQLAdapter(PostgreSQLAdapter):
2848 drivers = ('zxJDBC',)
2849
2850 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$')
2851
2852 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2853 credential_decoder=IDENTITY, driver_args={},
2854 adapter_args={}, do_connect=True, after_connection=None ):
2855 self.db = db
2856 self.dbengine = "postgres"
2857 self.uri = uri
2858 if do_connect: self.find_driver(adapter_args,uri)
2859 self.pool_size = pool_size
2860 self.folder = folder
2861 self.db_codec = db_codec
2862 self._after_connection = after_connection
2863 self.find_or_make_work_folder()
2864 ruri = uri.split('://',1)[1]
2865 m = self.REGEX_URI.match(ruri)
2866 if not m:
2867 raise SyntaxError("Invalid URI string in DAL")
2868 user = credential_decoder(m.group('user'))
2869 if not user:
2870 raise SyntaxError('User required')
2871 password = credential_decoder(m.group('password'))
2872 if not password:
2873 password = ''
2874 host = m.group('host')
2875 if not host:
2876 raise SyntaxError('Host name required')
2877 db = m.group('db')
2878 if not db:
2879 raise SyntaxError('Database name required')
2880 port = m.group('port') or '5432'
2881 msg = ('jdbc:postgresql://%s:%s/%s' % (host, port, db), user, password)
2882 def connector(msg=msg,driver_args=driver_args):
2883 return self.driver.connect(*msg,**driver_args)
2884 self.connector = connector
2885 if do_connect: self.reconnect()
2886
2887 - def after_connection(self):
2888 self.connection.set_client_encoding('UTF8')
2889 self.execute('BEGIN;')
2890 self.execute("SET CLIENT_ENCODING TO 'UNICODE';")
2891 self.try_json()
2892
2894 -class OracleAdapter(BaseAdapter):
2895 drivers = ('cx_Oracle',)
2896
2897 commit_on_alter_table = False
2898 types = {
2899 'boolean': 'CHAR(1)',
2900 'string': 'VARCHAR2(%(length)s)',
2901 'text': 'CLOB',
2902 'json': 'CLOB',
2903 'password': 'VARCHAR2(%(length)s)',
2904 'blob': 'CLOB',
2905 'upload': 'VARCHAR2(%(length)s)',
2906 'integer': 'INT',
2907 'bigint': 'NUMBER',
2908 'float': 'FLOAT',
2909 'double': 'BINARY_DOUBLE',
2910 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
2911 'date': 'DATE',
2912 'time': 'CHAR(8)',
2913 'datetime': 'DATE',
2914 'id': 'NUMBER PRIMARY KEY',
2915 'reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2916 'list:integer': 'CLOB',
2917 'list:string': 'CLOB',
2918 'list:reference': 'CLOB',
2919 'big-id': 'NUMBER PRIMARY KEY',
2920 'big-reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2921 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2922 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
2923 }
2924
2925 - def sequence_name(self,tablename):
2926 return '%s_sequence' % tablename
2927
2928 - def trigger_name(self,tablename):
2929 return '%s_trigger' % tablename
2930
2931 - def LEFT_JOIN(self):
2932 return 'LEFT OUTER JOIN'
2933
2934 - def RANDOM(self):
2935 return 'dbms_random.value'
2936
2937 - def NOT_NULL(self,default,field_type):
2938 return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)
2939
2940 - def _drop(self,table,mode):
2941 sequence_name = table._sequence_name
2942 return ['DROP TABLE %s %s;' % (table, mode),
2943 'DROP SEQUENCE %s;' % sequence_name]
2944 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
2945 if limitby:
2946 (lmin, lmax) = limitby
2947 if len(sql_w) > 1:
2948 sql_w_row = sql_w + ' AND w_row > %i' % lmin
2949 else:
2950 sql_w_row = 'WHERE w_row > %i' % lmin
2951 return 'SELECT %s %s FROM (SELECT w_tmp.*, ROWNUM w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNUM<=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
2952 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
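# Illustrative expansion (not part of the original source): limitby=(10, 20)
# produces roughly
#   SELECT ... FROM (SELECT w_tmp.*, ROWNUM w_row FROM (...) w_tmp
#                    WHERE ROWNUM<=20) ... WHERE w_row > 10;
# because Oracle (before 12c) has no LIMIT/OFFSET clause.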
2953
2959
2960 - def represent_exceptions(self, obj, fieldtype):
2961 if fieldtype == 'blob':
2962 obj = base64.b64encode(str(obj))
2963 return ":CLOB('%s')" % obj
2964 elif fieldtype == 'date':
2965 if isinstance(obj, (datetime.date, datetime.datetime)):
2966 obj = obj.isoformat()[:10]
2967 else:
2968 obj = str(obj)
2969 return "to_date('%s','yyyy-mm-dd')" % obj
2970 elif fieldtype == 'datetime':
2971 if isinstance(obj, datetime.datetime):
2972 obj = obj.isoformat()[:19].replace('T',' ')
2973 elif isinstance(obj, datetime.date):
2974 obj = obj.isoformat()[:10]+' 00:00:00'
2975 else:
2976 obj = str(obj)
2977 return "to_date('%s','yyyy-mm-dd hh24:mi:ss')" % obj
2978 return None
2979
2980 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2981 credential_decoder=IDENTITY, driver_args={},
2982 adapter_args={}, do_connect=True, after_connection=None):
2983 self.db = db
2984 self.dbengine = "oracle"
2985 self.uri = uri
2986 if do_connect: self.find_driver(adapter_args,uri)
2987 self.pool_size = pool_size
2988 self.folder = folder
2989 self.db_codec = db_codec
2990 self._after_connection = after_connection
2991 self.find_or_make_work_folder()
2992 ruri = uri.split('://',1)[1]
2993 if not 'threaded' in driver_args:
2994 driver_args['threaded']=True
2995 def connector(uri=ruri,driver_args=driver_args):
2996 return self.driver.connect(uri,**driver_args)
2997 self.connector = connector
2998 if do_connect: self.reconnect()
2999
3001 self.execute("ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
3002 self.execute("ALTER SESSION SET NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
3003
3004 oracle_fix = re.compile("[^']*('[^']*'[^']*)*\:(?P<clob>CLOB\('([^']+|'')*'\))")
3005
3006 - def execute(self, command, args=None):
3007 args = args or []
3008 i = 1
3009 while True:
3010 m = self.oracle_fix.match(command)
3011 if not m:
3012 break
3013 command = command[:m.start('clob')] + str(i) + command[m.end('clob'):]
3014 args.append(m.group('clob')[6:-2].replace("''", "'"))
3015 i += 1
3016 if command[-1:]==';':
3017 command = command[:-1]
3018 return self.log_execute(command, args)
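# Illustrative rewrite (not part of the original source): a statement such as
#   INSERT INTO t(b) VALUES (:CLOB('xyz'));
# leaves this loop as INSERT INTO t(b) VALUES (:1) with 'xyz' appended to
# args, so large CLOB payloads travel as bind variables instead of being
# inlined into the SQL text.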
3019
3020 - def create_sequence_and_triggers(self, query, table, **args):
3021 tablename = table._tablename
3022 sequence_name = table._sequence_name
3023 trigger_name = table._trigger_name
3024 self.execute(query)
3025 self.execute('CREATE SEQUENCE %s START WITH 1 INCREMENT BY 1 NOMAXVALUE MINVALUE -1;' % sequence_name)
3026 self.execute("""
3027 CREATE OR REPLACE TRIGGER %(trigger_name)s BEFORE INSERT ON %(tablename)s FOR EACH ROW
3028 DECLARE
3029 curr_val NUMBER;
3030 diff_val NUMBER;
3031 PRAGMA autonomous_transaction;
3032 BEGIN
3033 IF :NEW.id IS NOT NULL THEN
3034 EXECUTE IMMEDIATE 'SELECT %(sequence_name)s.nextval FROM dual' INTO curr_val;
3035 diff_val := :NEW.id - curr_val - 1;
3036 IF diff_val != 0 THEN
3037 EXECUTE IMMEDIATE 'alter sequence %(sequence_name)s increment by '|| diff_val;
3038 EXECUTE IMMEDIATE 'SELECT %(sequence_name)s.nextval FROM dual' INTO curr_val;
3039 EXECUTE IMMEDIATE 'alter sequence %(sequence_name)s increment by 1';
3040 END IF;
3041 END IF;
3042 SELECT %(sequence_name)s.nextval INTO :NEW.id FROM DUAL;
3043 END;
3044 """ % dict(trigger_name=trigger_name, tablename=tablename, sequence_name=sequence_name))
3045
3046 - def lastrowid(self,table):
3047 sequence_name = table._sequence_name
3048 self.execute('SELECT %s.currval FROM dual;' % sequence_name)
3049 return long(self.cursor.fetchone()[0])
3050
3051
3052
3053
3054
3055
3056
3057
3058
3059
3060 - def fetchall(self):
3061 if any(x[1]==cx_Oracle.CLOB for x in self.cursor.description):
3062 return [tuple([(c.read() if type(c) == cx_Oracle.LOB else c) \
3063 for c in r]) for r in self.cursor]
3064 else:
3065 return self.cursor.fetchall()
3066
3067 -class MSSQLAdapter(BaseAdapter):
3068 drivers = ('pyodbc',)
3069 T_SEP = 'T'
3070
3071 types = {
3072 'boolean': 'BIT',
3073 'string': 'VARCHAR(%(length)s)',
3074 'text': 'TEXT',
3075 'json': 'TEXT',
3076 'password': 'VARCHAR(%(length)s)',
3077 'blob': 'IMAGE',
3078 'upload': 'VARCHAR(%(length)s)',
3079 'integer': 'INT',
3080 'bigint': 'BIGINT',
3081 'float': 'FLOAT',
3082 'double': 'FLOAT',
3083 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
3084 'date': 'DATETIME',
3085 'time': 'CHAR(8)',
3086 'datetime': 'DATETIME',
3087 'id': 'INT IDENTITY PRIMARY KEY',
3088 'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3089 'list:integer': 'TEXT',
3090 'list:string': 'TEXT',
3091 'list:reference': 'TEXT',
3092 'geometry': 'geometry',
3093 'geography': 'geography',
3094 'big-id': 'BIGINT IDENTITY PRIMARY KEY',
3095 'big-reference': 'BIGINT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3096 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3097 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
3098 }
3099
3100 - def concat_add(self,tablename):
3101 return '; ALTER TABLE %s ADD ' % tablename
3102
3105
3107 return "DATEPART(%s,%s)" % (what, self.expand(field))
3108
3109 - def LEFT_JOIN(self):
3110 return 'LEFT OUTER JOIN'
3111
3112 - def RANDOM(self):
3113 return 'NEWID()'
3114
3115 - def ALLOW_NULL(self):
3116 return ' NULL'
3117
3118 - def SUBSTRING(self,field,parameters):
3119 return 'SUBSTRING(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])
3120
3121 - def PRIMARY_KEY(self,key):
3122 return 'PRIMARY KEY CLUSTERED (%s)' % key
3123
3124 - def AGGREGATE(self, first, what):
3125 if what == 'LENGTH':
3126 what = 'LEN'
3127 return "%s(%s)" % (what, self.expand(first))
3128
3129
3130 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
3131 if limitby:
3132 (lmin, lmax) = limitby
3133 sql_s += ' TOP %i' % lmax
3134 if 'GROUP BY' in sql_o:
3135 orderfound = sql_o.find('ORDER BY ')
3136 if orderfound >= 0:
3137 sql_o = sql_o[:orderfound]
3138 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
3139
3140 TRUE = 1
3141 FALSE = 0
3142
3143 REGEX_DSN = re.compile('^(?P<dsn>.+)$')
3144 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?(?P<urlargs>.*))?$')
3145 REGEX_ARGPATTERN = re.compile('(?P<argkey>[^=]+)=(?P<argvalue>[^&]*)')
3146
3147 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
3148 credential_decoder=IDENTITY, driver_args={},
3149 adapter_args={}, do_connect=True, srid=4326,
3150 after_connection=None):
3151 self.db = db
3152 self.dbengine = "mssql"
3153 self.uri = uri
3154 if do_connect: self.find_driver(adapter_args,uri)
3155 self.pool_size = pool_size
3156 self.folder = folder
3157 self.db_codec = db_codec
3158 self._after_connection = after_connection
3159 self.srid = srid
3160 self.find_or_make_work_folder()
3161
3162 ruri = uri.split('://',1)[1]
3163 if '@' not in ruri:
3164 try:
3165 m = self.REGEX_DSN.match(ruri)
3166 if not m:
3167 raise SyntaxError(
3168 'Parsing uri string(%s) has no result' % self.uri)
3169 dsn = m.group('dsn')
3170 if not dsn:
3171 raise SyntaxError('DSN required')
3172 except SyntaxError:
3173 e = sys.exc_info()[1]
3174 LOGGER.error('NdGpatch error')
3175 raise e
3176
3177 cnxn = dsn
3178 else:
3179 m = self.REGEX_URI.match(ruri)
3180 if not m:
3181 raise SyntaxError(
3182 "Invalid URI string in DAL: %s" % self.uri)
3183 user = credential_decoder(m.group('user'))
3184 if not user:
3185 raise SyntaxError('User required')
3186 password = credential_decoder(m.group('password'))
3187 if not password:
3188 password = ''
3189 host = m.group('host')
3190 if not host:
3191 raise SyntaxError('Host name required')
3192 db = m.group('db')
3193 if not db:
3194 raise SyntaxError('Database name required')
3195 port = m.group('port') or '1433'
3196
3197
3198
3199 argsdict = { 'DRIVER':'{SQL Server}' }
3200 urlargs = m.group('urlargs') or ''
3201 for argmatch in self.REGEX_ARGPATTERN.finditer(urlargs):
3202 argsdict[str(argmatch.group('argkey')).upper()] = argmatch.group('argvalue')
3203 urlargs = ';'.join(['%s=%s' % (ak, av) for (ak, av) in argsdict.iteritems()])
3204 cnxn = 'SERVER=%s;PORT=%s;DATABASE=%s;UID=%s;PWD=%s;%s' \
3205 % (host, port, db, user, password, urlargs)
3206 def connector(cnxn=cnxn,driver_args=driver_args):
3207 return self.driver.connect(cnxn,**driver_args)
3208 self.connector = connector
3209 if do_connect: self.reconnect()
3210
3212 - def lastrowid(self,table):
3213 self.execute('SELECT SCOPE_IDENTITY();')
3214 return long(self.cursor.fetchone()[0])
3215
3216 - def rowslice(self,rows,minimum=0,maximum=None):
3217 if maximum is None:
3218 return rows[minimum:]
3219 return rows[minimum:maximum]
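# Illustrative note (not part of the original source): TOP can only cap the
# row count, so select_limitby fetches the first lmax rows and this rowslice
# drops the leading lmin rows client-side, e.g.
# >>> adapter.rowslice(list(range(10)), 3, 10)
# [3, 4, 5, 6, 7, 8, 9]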
3220
3221 - def EPOCH(self, first):
3222 return "DATEDIFF(second, '1970-01-01 00:00:00', %s)" % self.expand(first)
3223
3226
3227
3228
3229
3230
3231 - def ST_ASTEXT(self, first):
3232 return '%s.STAsText()' %(self.expand(first))
3233
3234 - def ST_CONTAINS(self, first, second):
3235 return '%s.STContains(%s)=1' %(self.expand(first), self.expand(second, first.type))
3236
3237 - def ST_DISTANCE(self, first, second):
3238 return '%s.STDistance(%s)' %(self.expand(first), self.expand(second, first.type))
3239
3240 - def ST_EQUALS(self, first, second):
3241 return '%s.STEquals(%s)=1' %(self.expand(first), self.expand(second, first.type))
3242
3243 - def ST_INTERSECTS(self, first, second):
3244 return '%s.STIntersects(%s)=1' %(self.expand(first), self.expand(second, first.type))
3245
3246 - def ST_OVERLAPS(self, first, second):
3247 return '%s.STOverlaps(%s)=1' %(self.expand(first), self.expand(second, first.type))
3248
3249
3250
3251 - def ST_TOUCHES(self, first, second):
3252 return '%s.STTouches(%s)=1' %(self.expand(first), self.expand(second, first.type))
3253
3254 - def ST_WITHIN(self, first, second):
3255 return '%s.STWithin(%s)=1' %(self.expand(first), self.expand(second, first.type))
3256
3257 - def represent(self, obj, fieldtype):
3258 field_is_type = fieldtype.startswith
3259 if field_is_type('geometry'):
3260 srid = 0
3261 geotype, parms = fieldtype[:-1].split('(')
3262 if parms:
3263 srid = parms
3264 return "geometry::STGeomFromText('%s',%s)" %(obj, srid)
3265 elif fieldtype == 'geography':
3266 srid = 4326
3267 geotype, parms = fieldtype[:-1].split('(')
3268 if parms:
3269 srid = parms
3270 return "geography::STGeomFromText('%s',%s)" %(obj, srid)
3271
3272
3273 return "geometry::STGeomFromText('%s',%s)" %(obj, srid)
3274 return BaseAdapter.represent(self, obj, fieldtype)
3275
3278 """ experimental support for pagination in MSSQL"""
3279 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
3280 if limitby:
3281 (lmin, lmax) = limitby
3282 if lmin == 0:
3283 sql_s += ' TOP %i' % lmax
3284 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
3285 lmin += 1
3286 sql_o_inner = sql_o[sql_o.find('ORDER BY ')+9:]
3287 sql_g_inner = sql_o[:sql_o.find('ORDER BY ')]
3288 sql_f_outer = ['f_%s' % f for f in range(len(sql_f.split(',')))]
3289 sql_f_inner = [f for f in sql_f.split(',')]
3290 sql_f_iproxy = ['%s AS %s' % (o, n) for (o, n) in zip(sql_f_inner, sql_f_outer)]
3291 sql_f_iproxy = ', '.join(sql_f_iproxy)
3292 sql_f_oproxy = ', '.join(sql_f_outer)
3293 return 'SELECT %s %s FROM (SELECT %s ROW_NUMBER() OVER (ORDER BY %s) AS w_row, %s FROM %s%s%s) TMP WHERE w_row BETWEEN %i AND %s;' % (sql_s,sql_f_oproxy,sql_s,sql_f,sql_f_iproxy,sql_t,sql_w,sql_g_inner,lmin,lmax)
3294 return 'SELECT %s %s FROM %s%s%s;' % (sql_s,sql_f,sql_t,sql_w,sql_o)
3295 - def rowslice(self,rows,minimum=0,maximum=None):
3296 return rows
3297
3298
3299 -class MSSQL2Adapter(MSSQLAdapter):
3300 drivers = ('pyodbc',)
3301
3302 types = {
3303 'boolean': 'CHAR(1)',
3304 'string': 'NVARCHAR(%(length)s)',
3305 'text': 'NTEXT',
3306 'json': 'NTEXT',
3307 'password': 'NVARCHAR(%(length)s)',
3308 'blob': 'IMAGE',
3309 'upload': 'NVARCHAR(%(length)s)',
3310 'integer': 'INT',
3311 'bigint': 'BIGINT',
3312 'float': 'FLOAT',
3313 'double': 'FLOAT',
3314 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
3315 'date': 'DATETIME',
3316 'time': 'CHAR(8)',
3317 'datetime': 'DATETIME',
3318 'id': 'INT IDENTITY PRIMARY KEY',
3319 'reference': 'INT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3320 'list:integer': 'NTEXT',
3321 'list:string': 'NTEXT',
3322 'list:reference': 'NTEXT',
3323 'big-id': 'BIGINT IDENTITY PRIMARY KEY',
3324 'big-reference': 'BIGINT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3325 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3326 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
3327 }
3328
3329 - def represent(self, obj, fieldtype):
3330 value = BaseAdapter.represent(self, obj, fieldtype)
3331 if fieldtype in ('string','text', 'json') and value[:1]=="'":
3332 value = 'N'+value
3333 return value
3334
3337
3338 -class VerticaAdapter(MSSQLAdapter):
3339 drivers = ('pyodbc',)
3340 T_SEP = ' '
3341
3342 types = {
3343 'boolean': 'BOOLEAN',
3344 'string': 'VARCHAR(%(length)s)',
3345 'text': 'BYTEA',
3346 'json': 'VARCHAR(%(length)s)',
3347 'password': 'VARCHAR(%(length)s)',
3348 'blob': 'BYTEA',
3349 'upload': 'VARCHAR(%(length)s)',
3350 'integer': 'INT',
3351 'bigint': 'BIGINT',
3352 'float': 'FLOAT',
3353 'double': 'DOUBLE PRECISION',
3354 'decimal': 'DECIMAL(%(precision)s,%(scale)s)',
3355 'date': 'DATE',
3356 'time': 'TIME',
3357 'datetime': 'DATETIME',
3358 'id': 'IDENTITY',
3359 'reference': 'INT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3360 'list:integer': 'BYTEA',
3361 'list:string': 'BYTEA',
3362 'list:reference': 'BYTEA',
3363 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3364 }
3365
3366
3368 return "DATE_PART('%s', TIMESTAMP %s)" % (what, self.expand(first))
3369
3370 - def _truncate(self, table, mode=''):
3371 tablename = table._tablename
3372 return ['TRUNCATE %s %s;' % (tablename, mode or '')]
3373
3374 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
3375 if limitby:
3376 (lmin, lmax) = limitby
3377 sql_o += ' LIMIT %i OFFSET %i' % (lmax - lmin, lmin)
3378 return 'SELECT %s %s FROM %s%s%s;' % \
3379 (sql_s, sql_f, sql_t, sql_w, sql_o)
3380
3381 - def lastrowid(self,table):
3382 self.execute('SELECT LAST_INSERT_ID();')
3383 return long(self.cursor.fetchone()[0])
3384
3387
3388 -class SybaseAdapter(MSSQLAdapter):
3389 drivers = ('Sybase',)
3390
3391 types = {
3392 'boolean': 'BIT',
3393 'string': 'CHAR VARYING(%(length)s)',
3394 'text': 'TEXT',
3395 'json': 'TEXT',
3396 'password': 'CHAR VARYING(%(length)s)',
3397 'blob': 'IMAGE',
3398 'upload': 'CHAR VARYING(%(length)s)',
3399 'integer': 'INT',
3400 'bigint': 'BIGINT',
3401 'float': 'FLOAT',
3402 'double': 'FLOAT',
3403 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
3404 'date': 'DATETIME',
3405 'time': 'CHAR(8)',
3406 'datetime': 'DATETIME',
3407 'id': 'INT IDENTITY PRIMARY KEY',
3408 'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3409 'list:integer': 'TEXT',
3410 'list:string': 'TEXT',
3411 'list:reference': 'TEXT',
3412 'geometry': 'geometry',
3413 'geography': 'geography',
3414 'big-id': 'BIGINT IDENTITY PRIMARY KEY',
3415 'big-reference': 'BIGINT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3416 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3417 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
3418 }
3419
3420
3421 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
3422 credential_decoder=IDENTITY, driver_args={},
3423 adapter_args={}, do_connect=True, srid=4326,
3424 after_connection=None):
3425 self.db = db
3426 self.dbengine = "sybase"
3427 self.uri = uri
3428 if do_connect: self.find_driver(adapter_args,uri)
3429 self.pool_size = pool_size
3430 self.folder = folder
3431 self.db_codec = db_codec
3432 self._after_connection = after_connection
3433 self.srid = srid
3434 self.find_or_make_work_folder()
3435
3436 ruri = uri.split('://',1)[1]
3437 if '@' not in ruri:
3438 try:
3439 m = self.REGEX_DSN.match(ruri)
3440 if not m:
3441 raise SyntaxError(
3442 'Parsing uri string(%s) has no result' % self.uri)
3443 dsn = m.group('dsn')
3444 if not dsn:
3445 raise SyntaxError('DSN required')
3446 except SyntaxError:
3447 e = sys.exc_info()[1]
3448 LOGGER.error('NdGpatch error')
3449 raise e
3450 else:
3451 m = self.REGEX_URI.match(ruri)
3452 if not m:
3453 raise SyntaxError(
3454 "Invalid URI string in DAL: %s" % self.uri)
3455 user = credential_decoder(m.group('user'))
3456 if not user:
3457 raise SyntaxError('User required')
3458 password = credential_decoder(m.group('password'))
3459 if not password:
3460 password = ''
3461 host = m.group('host')
3462 if not host:
3463 raise SyntaxError('Host name required')
3464 db = m.group('db')
3465 if not db:
3466 raise SyntaxError('Database name required')
3467 port = m.group('port') or '1433'
3468
3469 dsn = 'sybase:host=%s:%s;dbname=%s' % (host,port,db)
3470
3471 driver_args.update(user = credential_decoder(user),
3472 password = credential_decoder(password))
3473
3474 def connector(dsn=dsn,driver_args=driver_args):
3475 return self.driver.connect(dsn,**driver_args)
3476 self.connector = connector
3477 if do_connect: self.reconnect()
3478
3480 -class FirebirdAdapter(BaseAdapter):
3481 drivers = ('kinterbasdb','firebirdsql','fdb','pyodbc')
3482
3483 commit_on_alter_table = False
3484 support_distributed_transaction = True
3485 types = {
3486 'boolean': 'CHAR(1)',
3487 'string': 'VARCHAR(%(length)s)',
3488 'text': 'BLOB SUB_TYPE 1',
3489 'json': 'BLOB SUB_TYPE 1',
3490 'password': 'VARCHAR(%(length)s)',
3491 'blob': 'BLOB SUB_TYPE 0',
3492 'upload': 'VARCHAR(%(length)s)',
3493 'integer': 'INTEGER',
3494 'bigint': 'BIGINT',
3495 'float': 'FLOAT',
3496 'double': 'DOUBLE PRECISION',
3497 'decimal': 'DECIMAL(%(precision)s,%(scale)s)',
3498 'date': 'DATE',
3499 'time': 'TIME',
3500 'datetime': 'TIMESTAMP',
3501 'id': 'INTEGER PRIMARY KEY',
3502 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3503 'list:integer': 'BLOB SUB_TYPE 1',
3504 'list:string': 'BLOB SUB_TYPE 1',
3505 'list:reference': 'BLOB SUB_TYPE 1',
3506 'big-id': 'BIGINT PRIMARY KEY',
3507 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3508 }
3509
3510 - def sequence_name(self,tablename):
3511 return 'genid_%s' % tablename
3512
3513 - def trigger_name(self,tablename):
3514 return 'trg_id_%s' % tablename
3515
3516 - def RANDOM(self):
3517 return 'RAND()'
3518
3519 - def EPOCH(self, first):
3520 return "DATEDIFF(second, '1970-01-01 00:00:00', %s)" % self.expand(first)
3521
3522 - def NOT_NULL(self,default,field_type):
3523 return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)
3524
3525 - def SUBSTRING(self,field,parameters):
3526 return 'SUBSTRING(%s from %s for %s)' % (self.expand(field), parameters[0], parameters[1])
3527
3530
3531 - def CONTAINS(self,first,second,case_sensitive=False):
3537
3538 - def _drop(self,table,mode):
3539 sequence_name = table._sequence_name
3540 return ['DROP TABLE %s %s;' % (table, mode),
3541 'DROP GENERATOR %s;' % sequence_name]
3542 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
3543 if limitby:
3544 (lmin, lmax) = limitby
3545 sql_s = ' FIRST %i SKIP %i %s' % (lmax - lmin, lmin, sql_s)
3546 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
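# Illustrative expansion (not part of the original source): limitby=(5, 15)
# becomes SELECT FIRST 10 SKIP 5 ... since Firebird paginates with
# FIRST/SKIP instead of LIMIT/OFFSET.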
3547
3548 - def _truncate(self,table,mode = ''):
3549 return ['DELETE FROM %s;' % table._tablename,
3550 'SET GENERATOR %s TO 0;' % table._sequence_name]
3551
3552 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+?)(\?set_encoding=(?P<charset>\w+))?$')
3553
3554 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
3555 credential_decoder=IDENTITY, driver_args={},
3556 adapter_args={}, do_connect=True, after_connection=None):
3557 self.db = db
3558 self.dbengine = "firebird"
3559 self.uri = uri
3560 if do_connect: self.find_driver(adapter_args,uri)
3561 self.pool_size = pool_size
3562 self.folder = folder
3563 self.db_codec = db_codec
3564 self._after_connection = after_connection
3565 self.find_or_make_work_folder()
3566 ruri = uri.split('://',1)[1]
3567 m = self.REGEX_URI.match(ruri)
3568 if not m:
3569 raise SyntaxError("Invalid URI string in DAL: %s" % self.uri)
3570 user = credential_decoder(m.group('user'))
3571 if not user:
3572 raise SyntaxError('User required')
3573 password = credential_decoder(m.group('password'))
3574 if not password:
3575 password = ''
3576 host = m.group('host')
3577 if not host:
3578 raise SyntaxError('Host name required')
3579 port = int(m.group('port') or 3050)
3580 db = m.group('db')
3581 if not db:
3582 raise SyntaxError('Database name required')
3583 charset = m.group('charset') or 'UTF8'
3584 driver_args.update(dsn='%s/%s:%s' % (host,port,db),
3585 user = credential_decoder(user),
3586 password = credential_decoder(password),
3587 charset = charset)
3588
3589 def connector(driver_args=driver_args):
3590 return self.driver.connect(**driver_args)
3591 self.connector = connector
3592 if do_connect: self.reconnect()
3593
3602
3607
3609 -class FirebirdEmbeddedAdapter(FirebirdAdapter):
3610 drivers = ('kinterbasdb','firebirdsql','fdb','pyodbc')
3611
3612 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<path>[^\?]+)(\?set_encoding=(?P<charset>\w+))?$')
3613
3614 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
3615 credential_decoder=IDENTITY, driver_args={},
3616 adapter_args={}, do_connect=True, after_connection=None):
3617 self.db = db
3618 self.dbengine = "firebird"
3619 self.uri = uri
3620 if do_connect: self.find_driver(adapter_args,uri)
3621 self.pool_size = pool_size
3622 self.folder = folder
3623 self.db_codec = db_codec
3624 self._after_connection = after_connection
3625 self.find_or_make_work_folder()
3626 ruri = uri.split('://',1)[1]
3627 m = self.REGEX_URI.match(ruri)
3628 if not m:
3629 raise SyntaxError(
3630 "Invalid URI string in DAL: %s" % self.uri)
3631 user = credential_decoder(m.group('user'))
3632 if not user:
3633 raise SyntaxError('User required')
3634 password = credential_decoder(m.group('password'))
3635 if not password:
3636 password = ''
3637 pathdb = m.group('path')
3638 if not pathdb:
3639 raise SyntaxError('Path required')
3640 charset = m.group('charset')
3641 if not charset:
3642 charset = 'UTF8'
3643 host = ''
3644 driver_args.update(host=host,
3645 database=pathdb,
3646 user=credential_decoder(user),
3647 password=credential_decoder(password),
3648 charset=charset)
3649
3650 def connector(driver_args=driver_args):
3651 return self.driver.connect(**driver_args)
3652 self.connector = connector
3653 if do_connect: self.reconnect()
3654
3760
3765
3768
3780
3781 -class DB2Adapter(BaseAdapter):
3782 drivers = ('pyodbc',)
3783
3784 types = {
3785 'boolean': 'CHAR(1)',
3786 'string': 'VARCHAR(%(length)s)',
3787 'text': 'CLOB',
3788 'json': 'CLOB',
3789 'password': 'VARCHAR(%(length)s)',
3790 'blob': 'BLOB',
3791 'upload': 'VARCHAR(%(length)s)',
3792 'integer': 'INT',
3793 'bigint': 'BIGINT',
3794 'float': 'REAL',
3795 'double': 'DOUBLE',
3796 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
3797 'date': 'DATE',
3798 'time': 'TIME',
3799 'datetime': 'TIMESTAMP',
3800 'id': 'INT GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
3801 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3802 'list:integer': 'CLOB',
3803 'list:string': 'CLOB',
3804 'list:reference': 'CLOB',
3805 'big-id': 'BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
3806 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3807 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3808 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
3809 }
3810
3811 - def LEFT_JOIN(self):
3812 return 'LEFT OUTER JOIN'
3813
3814 - def RANDOM(self):
3815 return 'RAND()'
3816
3817 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
3818 if limitby:
3819 (lmin, lmax) = limitby
3820 sql_o += ' FETCH FIRST %i ROWS ONLY' % lmax
3821 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
3822
3823 - def represent_exceptions(self, obj, fieldtype):
3824 if fieldtype == 'blob':
3825 obj = base64.b64encode(str(obj))
3826 return "BLOB('%s')" % obj
3827 elif fieldtype == 'datetime':
3828 if isinstance(obj, datetime.datetime):
3829 obj = obj.isoformat()[:19].replace('T','-').replace(':','.')
3830 elif isinstance(obj, datetime.date):
3831 obj = obj.isoformat()[:10]+'-00.00.00'
3832 return "'%s'" % obj
3833 return None
3834
3835 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
3836 credential_decoder=IDENTITY, driver_args={},
3837 adapter_args={}, do_connect=True, after_connection=None):
3838 self.db = db
3839 self.dbengine = "db2"
3840 self.uri = uri
3841 if do_connect: self.find_driver(adapter_args,uri)
3842 self.pool_size = pool_size
3843 self.folder = folder
3844 self.db_codec = db_codec
3845 self._after_connection = after_connection
3846 self.find_or_make_work_folder()
3847 ruri = uri.split('://', 1)[1]
3848 def connector(cnxn=ruri,driver_args=driver_args):
3849 return self.driver.connect(cnxn,**driver_args)
3850 self.connector = connector
3851 if do_connect: self.reconnect()
3852
3853 - def execute(self,command):
3854 if command[-1:]==';':
3855 command = command[:-1]
3856 return self.log_execute(command)
3857
3858 - def lastrowid(self,table):
3859 self.execute('SELECT DISTINCT IDENTITY_VAL_LOCAL() FROM %s;' % table)
3860 return long(self.cursor.fetchone()[0])
3861
3862 - def rowslice(self,rows,minimum=0,maximum=None):
3863 if maximum is None:
3864 return rows[minimum:]
3865 return rows[minimum:maximum]
3866
3868 -class TeradataAdapter(BaseAdapter):
3869 drivers = ('pyodbc',)
3870
3871 types = {
3872 'boolean': 'CHAR(1)',
3873 'string': 'VARCHAR(%(length)s)',
3874 'text': 'CLOB',
3875 'json': 'CLOB',
3876 'password': 'VARCHAR(%(length)s)',
3877 'blob': 'BLOB',
3878 'upload': 'VARCHAR(%(length)s)',
3879 'integer': 'INT',
3880 'bigint': 'BIGINT',
3881 'float': 'REAL',
3882 'double': 'DOUBLE',
3883 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
3884 'date': 'DATE',
3885 'time': 'TIME',
3886 'datetime': 'TIMESTAMP',
3887
3888
3889 'id': 'INT GENERATED ALWAYS AS IDENTITY',
3890 'reference': 'INT',
3891 'list:integer': 'CLOB',
3892 'list:string': 'CLOB',
3893 'list:reference': 'CLOB',
3894 'big-id': 'BIGINT GENERATED ALWAYS AS IDENTITY',
3895 'big-reference': 'BIGINT',
3896 'reference FK': ' REFERENCES %(foreign_key)s',
3897 'reference TFK': ' FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s)',
3898 }
3899
3900 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
3901 credential_decoder=IDENTITY, driver_args={},
3902 adapter_args={}, do_connect=True, after_connection=None):
3903 self.db = db
3904 self.dbengine = "teradata"
3905 self.uri = uri
3906 if do_connect: self.find_driver(adapter_args,uri)
3907 self.pool_size = pool_size
3908 self.folder = folder
3909 self.db_codec = db_codec
3910 self._after_connection = after_connection
3911 self.find_or_make_work_folder()
3912 ruri = uri.split('://', 1)[1]
3913 def connector(cnxn=ruri,driver_args=driver_args):
3914 return self.driver.connect(cnxn,**driver_args)
3915 self.connector = connector
3916 if do_connect: self.reconnect()
3917
3918 - def LEFT_JOIN(self):
3919 return 'LEFT OUTER JOIN'
3920
3921
3922 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
3923 if limitby:
3924 (lmin, lmax) = limitby
3925 sql_s += ' TOP %i' % lmax
3926 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
3927
3928 - def _truncate(self, table, mode=''):
3929 tablename = table._tablename
3930 return ['DELETE FROM %s ALL;' % (tablename)]
3931
3932 INGRES_SEQNAME='ii***lineitemsequence'
3936 -class IngresAdapter(BaseAdapter):
3937 drivers = ('pyodbc',)
3938
3939 types = {
3940 'boolean': 'CHAR(1)',
3941 'string': 'VARCHAR(%(length)s)',
3942 'text': 'CLOB',
3943 'json': 'CLOB',
3944 'password': 'VARCHAR(%(length)s)',
3945 'blob': 'BLOB',
3946 'upload': 'VARCHAR(%(length)s)',
3947 'integer': 'INTEGER4',
3948 'bigint': 'BIGINT',
3949 'float': 'FLOAT',
3950 'double': 'FLOAT8',
3951 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
3952 'date': 'ANSIDATE',
3953 'time': 'TIME WITHOUT TIME ZONE',
3954 'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
3955 'id': 'int not null unique with default next value for %s' % INGRES_SEQNAME,
3956 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3957 'list:integer': 'CLOB',
3958 'list:string': 'CLOB',
3959 'list:reference': 'CLOB',
3960 'big-id': 'bigint not null unique with default next value for %s' % INGRES_SEQNAME,
3961 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3962 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3963 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
3964 }
3965
3966 - def LEFT_JOIN(self):
3967 return 'LEFT OUTER JOIN'
3968
3969 - def RANDOM(self):
3970 return 'RANDOM()'
3971
3972 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
3973 if limitby:
3974 (lmin, lmax) = limitby
3975 fetch_amt = lmax - lmin
3976 if fetch_amt:
3977 sql_s += ' FIRST %d ' % (fetch_amt, )
3978 if lmin:
3979
3980 sql_o += ' OFFSET %d' % (lmin, )
3981 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
3982
3983 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
3984 credential_decoder=IDENTITY, driver_args={},
3985 adapter_args={}, do_connect=True, after_connection=None):
3986 self.db = db
3987 self.dbengine = "ingres"
3988 self._driver = pyodbc
3989 self.uri = uri
3990 if do_connect: self.find_driver(adapter_args,uri)
3991 self.pool_size = pool_size
3992 self.folder = folder
3993 self.db_codec = db_codec
3994 self._after_connection = after_connection
3995 self.find_or_make_work_folder()
3996 connstr = uri.split(':', 1)[1]
3997
3998 connstr = connstr.lstrip()
3999 while connstr.startswith('/'):
4000 connstr = connstr[1:]
4001 if '=' in connstr:
4002
4003 ruri = connstr
4004 else:
4005
4006 database_name = connstr
4007 default_driver_name = 'Ingres'
4008 vnode = '(local)'
4009 servertype = 'ingres'
4010 ruri = 'Driver={%s};Server=%s;Database=%s' % (default_driver_name, vnode, database_name)
4011 def connector(cnxn=ruri,driver_args=driver_args):
4012 return self.driver.connect(cnxn,**driver_args)
4013
4014 self.connector = connector
4015
4016
4017 if do_connect: self.reconnect()
4018
4019 - def create_sequence_and_triggers(self, query, table, **args):
4020 # post-create step: re-organize the table to btree for performance
4021
4022
4023 if hasattr(table,'_primarykey'):
4024 modify_tbl_sql = 'modify %s to btree unique on %s' % \
4025 (table._tablename,
4026 ', '.join(["'%s'" % x for x in table.primarykey]))
4027 self.execute(modify_tbl_sql)
4028 else:
4029 tmp_seqname='%s_iisq' % table._tablename
4030 query=query.replace(INGRES_SEQNAME, tmp_seqname)
4031 self.execute('create sequence %s' % tmp_seqname)
4032 self.execute(query)
4033 self.execute('modify %s to btree unique on %s' % (table._tablename, 'id'))
4034
4035
4036 - def lastrowid(self,table):
4037 tmp_seqname='%s_iisq' % table
4038 self.execute('select current value for %s' % tmp_seqname)
4039 return long(self.cursor.fetchone()[0])
4040
4043 -class IngresUnicodeAdapter(IngresAdapter):
4044 drivers = ('pyodbc',)
4045
4046 types = {
4047 'boolean': 'CHAR(1)',
4048 'string': 'NVARCHAR(%(length)s)',
4049 'text': 'NCLOB',
4050 'json': 'NCLOB',
4051 'password': 'NVARCHAR(%(length)s)',
4052 'blob': 'BLOB',
4053 'upload': 'VARCHAR(%(length)s)',
4054 'integer': 'INTEGER4',
4055 'bigint': 'BIGINT',
4056 'float': 'FLOAT',
4057 'double': 'FLOAT8',
4058 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
4059 'date': 'ANSIDATE',
4060 'time': 'TIME WITHOUT TIME ZONE',
4061 'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
4062 'id': 'INTEGER4 not null unique with default next value for %s'% INGRES_SEQNAME,
4063 'reference': 'INTEGER4, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4064 'list:integer': 'NCLOB',
4065 'list:string': 'NCLOB',
4066 'list:reference': 'NCLOB',
4067 'big-id': 'BIGINT not null unique with default next value for %s'% INGRES_SEQNAME,
4068 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4069 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4070 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
4071 }
4072
4073 -class SAPDBAdapter(BaseAdapter):
4074 drivers = ('sapdb',)
4075
4076 support_distributed_transaction = False
4077 types = {
4078 'boolean': 'CHAR(1)',
4079 'string': 'VARCHAR(%(length)s)',
4080 'text': 'LONG',
4081 'json': 'LONG',
4082 'password': 'VARCHAR(%(length)s)',
4083 'blob': 'LONG',
4084 'upload': 'VARCHAR(%(length)s)',
4085 'integer': 'INT',
4086 'bigint': 'BIGINT',
4087 'float': 'FLOAT',
4088 'double': 'DOUBLE PRECISION',
4089 'decimal': 'FIXED(%(precision)s,%(scale)s)',
4090 'date': 'DATE',
4091 'time': 'TIME',
4092 'datetime': 'TIMESTAMP',
4093 'id': 'INT PRIMARY KEY',
4094 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4095 'list:integer': 'LONG',
4096 'list:string': 'LONG',
4097 'list:reference': 'LONG',
4098 'big-id': 'BIGINT PRIMARY KEY',
4099 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4100 }
4101
4102 - def sequence_name(self, table):
4103 return '%s_id_Seq' % table
4104
4105 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
4106 if limitby:
4107 (lmin, lmax) = limitby
4108 if len(sql_w) > 1:
4109 sql_w_row = sql_w + ' AND w_row > %i' % lmin
4110 else:
4111 sql_w_row = 'WHERE w_row > %i' % lmin
4112 return '%s %s FROM (SELECT w_tmp.*, ROWNO w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNO=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
4113 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
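# For illustration: with sql_s='SELECT', sql_f='person.name',
# sql_t='person', empty sql_w/sql_o and limitby=(10, 20), the template
# above emits roughly
#
#   SELECT person.name FROM
#     (SELECT w_tmp.*, ROWNO w_row FROM
#       (SELECT person.name FROM person) w_tmp
#      WHERE ROWNO=20) person
#   WHERE w_row > 10;
#
# the inner ROWNO cut keeps the first lmax rows and the outer w_row
# test skips the first lmin of them.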
4114
4115 - def create_sequence_and_triggers(self, query, table, **args):
4116 # these statements assume the sequence does not exist yet
4117 self.execute('CREATE SEQUENCE %s;' % table._sequence_name)
4118 self.execute("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT NEXTVAL('%s');" \
4119 % (table._tablename, table._id.name, table._sequence_name))
4120 self.execute(query)
4121
4122 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$')
4123
4124
4125 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
4126 credential_decoder=IDENTITY, driver_args={},
4127 adapter_args={}, do_connect=True, after_connection=None):
4128 self.db = db
4129 self.dbengine = "sapdb"
4130 self.uri = uri
4131 if do_connect: self.find_driver(adapter_args,uri)
4132 self.pool_size = pool_size
4133 self.folder = folder
4134 self.db_codec = db_codec
4135 self._after_connection = after_connection
4136 self.find_or_make_work_folder()
4137 ruri = uri.split('://',1)[1]
4138 m = self.REGEX_URI.match(ruri)
4139 if not m:
4140 raise SyntaxError("Invalid URI string in DAL")
4141 user = credential_decoder(m.group('user'))
4142 if not user:
4143 raise SyntaxError('User required')
4144 password = credential_decoder(m.group('password'))
4145 if not password:
4146 password = ''
4147 host = m.group('host')
4148 if not host:
4149 raise SyntaxError('Host name required')
4150 db = m.group('db')
4151 if not db:
4152 raise SyntaxError('Database name required')
4153 def connector(user=user, password=password, database=db,
4154 host=host, driver_args=driver_args):
4155 return self.driver.Connection(user, password, database,
4156 host, **driver_args)
4157 self.connector = connector
4158 if do_connect: self.reconnect()
4159
4160 - def lastrowid(self, table):
4161 self.execute("select %s.NEXTVAL from dual" % table._sequence_name)
4162 return long(self.cursor.fetchone()[0])
4163
4164 -class CubridAdapter(MySQLAdapter):
4165 drivers = ('cubriddb',)
4166
4167 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$')
4168
4169 - def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
4170 credential_decoder=IDENTITY, driver_args={},
4171 adapter_args={}, do_connect=True, after_connection=None):
4172 self.db = db
4173 self.dbengine = "cubrid"
4174 self.uri = uri
4175 if do_connect: self.find_driver(adapter_args,uri)
4176 self.pool_size = pool_size
4177 self.folder = folder
4178 self.db_codec = db_codec
4179 self._after_connection = after_connection
4180 self.find_or_make_work_folder()
4181 ruri = uri.split('://',1)[1]
4182 m = self.REGEX_URI.match(ruri)
4183 if not m:
4184 raise SyntaxError(
4185 "Invalid URI string in DAL: %s" % self.uri)
4186 user = credential_decoder(m.group('user'))
4187 if not user:
4188 raise SyntaxError('User required')
4189 password = credential_decoder(m.group('password'))
4190 if not password:
4191 password = ''
4192 host = m.group('host')
4193 if not host:
4194 raise SyntaxError('Host name required')
4195 db = m.group('db')
4196 if not db:
4197 raise SyntaxError('Database name required')
4198 port = int(m.group('port') or '30000')
4199 charset = m.group('charset') or 'utf8'
4202 def connector(host=host, port=port, db=db,
4203 user=user, passwd=password, driver_args=driver_args):
4204 return self.driver.connect(host,port,db,user,passwd,**driver_args)
4205 self.connector = connector
4206 if do_connect: self.reconnect()
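# Example with hypothetical credentials: 'cubrid://user:pass@dbhost/mydb'
# parses to host='dbhost', port=30000 (the default), db='mydb', and the
# driver is invoked as
# self.driver.connect('dbhost', 30000, 'mydb', 'user', 'pass', **driver_args)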
4207
4208 - def after_connection(self):
4209 self.execute('SET FOREIGN_KEY_CHECKS=1;')
4210 self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
4211
4215 -class DatabaseStoredFile:
4216
4217 web2py_filesystem = False
4218
4219 - def escape(self, obj):
4220 return self.db._adapter.escape(obj)
4221
4222 - def __init__(self, db, filename, mode):
4223 if db._adapter.dbengine not in ('mysql', 'postgres'):
4224 raise RuntimeError("only MySQL/Postgres can store metadata .table files in database for now")
4225 self.db = db
4226 self.filename = filename
4227 self.mode = mode
4228 if not self.web2py_filesystem:
4229 if db._adapter.dbengine == 'mysql':
4230 sql = "CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(255), content LONGTEXT, PRIMARY KEY(path) ) ENGINE=InnoDB;"
4231 elif db._adapter.dbengine == 'postgres':
4232 sql = "CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(255), content TEXT, PRIMARY KEY(path));"
4233 self.db.executesql(sql)
4234 DatabaseStoredFile.web2py_filesystem = True
4235 self.p=0
4236 self.data = ''
4237 if mode in ('r','rw','a'):
4238 query = "SELECT content FROM web2py_filesystem WHERE path='%s'" \
4239 % filename
4240 rows = self.db.executesql(query)
4241 if rows:
4242 self.data = rows[0][0]
4243 elif exists(filename):
4244 datafile = open(filename, 'r')
4245 try:
4246 self.data = datafile.read()
4247 finally:
4248 datafile.close()
4249 elif mode in ('r','rw'):
4250 raise RuntimeError("File %s does not exist" % filename)
4251
4252 - def read(self, bytes):
4253 data = self.data[self.p:self.p+bytes]
4254 self.p += len(data)
4255 return data
4256
4257 - def readline(self):
4258 i = self.data.find('\n',self.p)+1
4259 if i>0:
4260 data, self.p = self.data[self.p:i], i
4261 else:
4262 data, self.p = self.data[self.p:], len(self.data)
4263 return data
4264
4265 - def write(self, data):
4266 self.data += data
4267
4268 - def close_connection(self):
4269 if self.db is not None:
4270 self.db.executesql(
4271 "DELETE FROM web2py_filesystem WHERE path='%s'" % self.filename)
4272 query = "INSERT INTO web2py_filesystem(path,content) VALUES ('%s','%s')"\
4273 % (self.filename, self.data.replace("'","''"))
4274 self.db.executesql(query)
4275 self.db.commit()
4276 self.db = None
4277
4278 - def close(self):
4279 pass
4280
4281 @staticmethod
4282 - def exists(db, filename):
4283 if exists(filename):
4284 return True
4285 query = "SELECT path FROM web2py_filesystem WHERE path='%s'" % filename
4286 if db.executesql(query):
4287 return True
4288 return False
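# DatabaseStoredFile emulates the minimal file API the migration
# machinery needs (read/readline/write/close) on top of the two-column
# web2py_filesystem table, so that .table metadata files can live in the
# database itself on hosts without a writable filesystem. Sketch of the
# backing table contents (hypothetical paths):
#
#   path                                        | content
#   --------------------------------------------+---------------------
#   applications/app/databases/sql.log          | migration log text
#   applications/app/databases/db_person.table  | serialized metadata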
4289
4290 -class UseDatabaseStoredFile:
4291
4292 - def file_exists(self, filename):
4293 return DatabaseStoredFile.exists(self.db, filename)
4295
4296 - def file_open(self, filename, mode='rb', lock=True):
4297 return DatabaseStoredFile(self.db, filename, mode)
4298
4299 - def file_close(self, fileobj):
4300 fileobj.close_connection()
4301
4302 - def file_delete(self, filename):
4303 query = "DELETE FROM web2py_filesystem WHERE path='%s'" % filename
4304 self.db.executesql(query)
4305 self.db.commit()
4306
4307 -class GoogleSQLAdapter(UseDatabaseStoredFile, MySQLAdapter):
4308 uploads_in_blob = True
4309
4310 REGEX_URI = re.compile('^(?P<instance>.*)/(?P<db>.*)$')
4311
4312 - def __init__(self, db, uri='google:sql://realm:domain/database',
4313 pool_size=0, folder=None, db_codec='UTF-8',
4314 credential_decoder=IDENTITY, driver_args={},
4315 adapter_args={}, do_connect=True, after_connection=None):
4316
4317 self.db = db
4318 self.dbengine = "mysql"
4319 self.uri = uri
4320 self.pool_size = pool_size
4321 self.db_codec = db_codec
4322 self._after_connection = after_connection
4323 self.folder = folder or pjoin('$HOME',THREAD_LOCAL.folder.split(
4324 os.sep+'applications'+os.sep,1)[1])
4325 ruri = uri.split("://")[1]
4326 m = self.REGEX_URI.match(ruri)
4327 if not m:
4328 raise SyntaxError("Invalid URI string in SQLDB: %s" % self.uri)
4329 instance = credential_decoder(m.group('instance'))
4330 self.dbstring = db = credential_decoder(m.group('db'))
4331 driver_args['instance'] = instance
4332 if not 'charset' in driver_args:
4333 driver_args['charset'] = 'utf8'
4334 self.createdb = createdb = adapter_args.get('createdb',True)
4335 if not createdb:
4336 driver_args['database'] = db
4337 def connector(driver_args=driver_args):
4338 return rdbms.connect(**driver_args)
4339 self.connector = connector
4340 if do_connect: self.reconnect()
4341
4342 - def after_connection(self):
4343 if self.createdb:
4344
4345 self.execute('CREATE DATABASE IF NOT EXISTS %s' % self.dbstring)
4346 self.execute('USE %s' % self.dbstring)
4347 self.execute("SET FOREIGN_KEY_CHECKS=1;")
4348 self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
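# Usage sketch (hypothetical instance): DAL('google:sql://project:myinstance/mydb')
# puts 'project:myinstance' in driver_args['instance'] and, with the
# default adapter_args createdb=True, runs
# CREATE DATABASE IF NOT EXISTS mydb; USE mydb; on connection.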
4349
4350 - def execute(self, command, *a, **b):
4351 return self.log_execute(command.decode('utf8'), *a, **b)
4352
4353 -class NoSQLAdapter(BaseAdapter):
4354 can_select_for_update = False
4355
4356 @staticmethod
4357 - def to_unicode(obj):
4358 if isinstance(obj, str):
4359 return obj.decode('utf8')
4360 elif not isinstance(obj, unicode):
4361 return unicode(obj)
4362 return obj
4363
4364 - def id_query(self, table):
4365 return table._id > 0
4366
4367 - def represent(self, obj, fieldtype):
4368 field_is_type = fieldtype.startswith
4369 if isinstance(obj, CALLABLETYPES):
4370 obj = obj()
4371 if isinstance(fieldtype, SQLCustomType):
4372 return fieldtype.encoder(obj)
4373 if isinstance(obj, (Expression, Field)):
4374 raise SyntaxError("non supported on GAE")
4375 if self.dbengine == 'google:datastore':
4376 if isinstance(fieldtype, gae.Property):
4377 return obj
4378 is_string = isinstance(fieldtype,str)
4379 is_list = is_string and field_is_type('list:')
4380 if is_list:
4381 if not obj:
4382 obj = []
4383 if not isinstance(obj, (list, tuple)):
4384 obj = [obj]
4385 if obj == '' and not \
4386 (is_string and fieldtype[:2] in ['st','te', 'pa','up']):
4387 return None
4388 if not obj is None:
4389 if isinstance(obj, list) and not is_list:
4390 obj = [self.represent(o, fieldtype) for o in obj]
4391 elif fieldtype in ('integer','bigint','id'):
4392 obj = long(obj)
4393 elif fieldtype == 'double':
4394 obj = float(obj)
4395 elif is_string and field_is_type('reference'):
4396 if isinstance(obj, (Row, Reference)):
4397 obj = obj['id']
4398 obj = long(obj)
4399 elif fieldtype == 'boolean':
4400 if obj and not str(obj)[0].upper() in '0F':
4401 obj = True
4402 else:
4403 obj = False
4404 elif fieldtype == 'date':
4405 if not isinstance(obj, datetime.date):
4406 (y, m, d) = map(int,str(obj).strip().split('-'))
4407 obj = datetime.date(y, m, d)
4408 elif isinstance(obj,datetime.datetime):
4409 (y, m, d) = (obj.year, obj.month, obj.day)
4410 obj = datetime.date(y, m, d)
4411 elif fieldtype == 'time':
4412 if not isinstance(obj, datetime.time):
4413 time_items = map(int,str(obj).strip().split(':')[:3])
4414 if len(time_items) == 3:
4415 (h, mi, s) = time_items
4416 else:
4417 (h, mi, s) = time_items + [0]
4418 obj = datetime.time(h, mi, s)
4419 elif fieldtype == 'datetime':
4420 if not isinstance(obj, datetime.datetime):
4421 (y, m, d) = map(int,str(obj)[:10].strip().split('-'))
4422 time_items = map(int,str(obj)[11:].strip().split(':')[:3])
4423 while len(time_items)<3:
4424 time_items.append(0)
4425 (h, mi, s) = time_items
4426 obj = datetime.datetime(y, m, d, h, mi, s)
4427 elif fieldtype == 'blob':
4428 pass
4429 elif fieldtype == 'json':
4430 if isinstance(obj, basestring):
4431 obj = self.to_unicode(obj)
4432 if have_serializers:
4433 obj = serializers.loads_json(obj)
4434 elif simplejson:
4435 obj = simplejson.loads(obj)
4436 else:
4437 raise RuntimeError("missing simplejson")
4438 elif is_string and field_is_type('list:string'):
4439 return map(self.to_unicode,obj)
4440 elif is_list:
4441 return map(int,obj)
4442 else:
4443 obj = self.to_unicode(obj)
4444 return obj
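# A few concrete conversions performed by represent above
# (doctest-style sketch):
#
#   >>> adapter.represent('2015-03-02', 'date')
#   datetime.date(2015, 3, 2)
#   >>> adapter.represent('10:20:30', 'time')
#   datetime.time(10, 20, 30)
#   >>> adapter.represent('t', 'boolean')
#   True
#   >>> adapter.represent(['1', '2'], 'list:integer')
#   [1, 2]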
4445
4446 - def _insert(self, table, fields):
4447 return 'insert %s in %s' % (fields, table)
4448
4449 - def _count(self,query,distinct=None):
4450 return 'count %s' % repr(query)
4451
4452 - def _select(self,query,fields,attributes):
4453 return 'select %s where %s' % (repr(fields), repr(query))
4454
4455 - def _delete(self,tablename, query):
4456 return 'delete %s where %s' % (repr(tablename),repr(query))
4457
4458 - def _update(self,tablename,query,fields):
4459 return 'update %s (%s) where %s' % (repr(tablename),
4460 repr(fields),repr(query))
4461
4463 """
4464 remember: no transactions on many NoSQL
4465 """
4466 pass
4467
4469 """
4470 remember: no transactions on many NoSQL
4471 """
4472 pass
4473
4475 """
4476 remember: no transactions on many NoSQL
4477 """
4478 pass
4479
4480
4481 # these functions should never be called!
4482 - def OR(self,first,second): raise SyntaxError("Not supported")
4483 - def AND(self,first,second): raise SyntaxError("Not supported")
4484 - def AS(self,first,second): raise SyntaxError("Not supported")
4485 - def ON(self,first,second): raise SyntaxError("Not supported")
4486 - def STARTSWITH(self,first,second=None): raise SyntaxError("Not supported")
4487 - def ENDSWITH(self,first,second=None): raise SyntaxError("Not supported")
4488 - def ADD(self,first,second): raise SyntaxError("Not supported")
4489 - def SUB(self,first,second): raise SyntaxError("Not supported")
4490 - def MUL(self,first,second): raise SyntaxError("Not supported")
4491 - def DIV(self,first,second): raise SyntaxError("Not supported")
4492 - def LOWER(self,first): raise SyntaxError("Not supported")
4493 - def UPPER(self,first): raise SyntaxError("Not supported")
4494 - def EXTRACT(self,first,what): raise SyntaxError("Not supported")
4495 - def LENGTH(self, first): raise SyntaxError("Not supported")
4496 - def AGGREGATE(self,first,what): raise SyntaxError("Not supported")
4497 - def LEFT_JOIN(self): raise SyntaxError("Not supported")
4498 - def RANDOM(self): raise SyntaxError("Not supported")
4499 - def SUBSTRING(self,field,parameters): raise SyntaxError("Not supported")
4500 - def PRIMARY_KEY(self,key): raise SyntaxError("Not supported")
4501 - def ILIKE(self,first,second): raise SyntaxError("Not supported")
4502 - def drop(self,table,mode): raise SyntaxError("Not supported")
4503 - def alias(self,table,alias): raise SyntaxError("Not supported")
4504 - def migrate_table(self,*a,**b): raise SyntaxError("Not supported")
4505 - def distributed_transaction_begin(self,key): raise SyntaxError("Not supported")
4506 - def prepare(self,key): raise SyntaxError("Not supported")
4507 - def commit_prepared(self,key): raise SyntaxError("Not supported")
4508 - def rollback_prepared(self,key): raise SyntaxError("Not supported")
4509 - def concat_add(self,table): raise SyntaxError("Not supported")
4510 - def constraint_name(self, table, fieldname): raise SyntaxError("Not supported")
4512 - def log_execute(self,*a,**b): raise SyntaxError("Not supported")
4513 - def execute(self,*a,**b): raise SyntaxError("Not supported")
4515 - def lastrowid(self,table): raise SyntaxError("Not supported")
4516 - def rowslice(self,rows,minimum=0,maximum=None): raise SyntaxError("Not supported")
4517
4518
4519 -class GAEF(object):
4520 - def __init__(self,name,op,value,apply):
4521 self.name=name=='id' and '__key__' or name
4522 self.op=op
4523 self.value=value
4524 self.apply=apply
4525 - def __repr__(self):
4526 return '(%s %s %s:%s)' % (self.name, self.op, repr(self.value), type(self.value))
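# Each query operator below compiles to a list of GAEF filters; e.g.
# db.person.name == 'James' becomes roughly
# [GAEF('name', '=', u'James', <lambda>)], where the lambda is the pure
# Python fallback applied when the candidate rows are already an
# in-memory list rather than a datastore query.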
4527
4528 -class GoogleDatastoreAdapter(NoSQLAdapter):
4529 uploads_in_blob = True
4530 types = {}
4531
4532 - def file_exists(self, filename): pass
4533 - def file_open(self, filename, mode='rb', lock=True): pass
4534 - def file_close(self, fileobj): pass
4536 REGEX_NAMESPACE = re.compile('.*://(?P<namespace>.+)')
4537
4538 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
4539 credential_decoder=IDENTITY, driver_args={},
4540 adapter_args={}, do_connect=True, after_connection=None):
4541 self.types.update({
4542 'boolean': gae.BooleanProperty,
4543 'string': (lambda **kwargs: gae.StringProperty(multiline=True, **kwargs)),
4544 'text': gae.TextProperty,
4545 'json': gae.TextProperty,
4546 'password': gae.StringProperty,
4547 'blob': gae.BlobProperty,
4548 'upload': gae.StringProperty,
4549 'integer': gae.IntegerProperty,
4550 'bigint': gae.IntegerProperty,
4551 'float': gae.FloatProperty,
4552 'double': gae.FloatProperty,
4553 'decimal': GAEDecimalProperty,
4554 'date': gae.DateProperty,
4555 'time': gae.TimeProperty,
4556 'datetime': gae.DateTimeProperty,
4557 'id': None,
4558 'reference': gae.IntegerProperty,
4559 'list:string': (lambda **kwargs: gae.StringListProperty(default=None, **kwargs)),
4560 'list:integer': (lambda **kwargs: gae.ListProperty(int,default=None, **kwargs)),
4561 'list:reference': (lambda **kwargs: gae.ListProperty(int,default=None, **kwargs)),
4562 })
4563 self.db = db
4564 self.uri = uri
4565 self.dbengine = 'google:datastore'
4566 self.folder = folder
4567 db['_lastsql'] = ''
4568 self.db_codec = 'UTF-8'
4569 self._after_connection = after_connection
4570 self.pool_size = 0
4571 match = self.REGEX_NAMESPACE.match(uri)
4572 if match:
4573 namespace_manager.set_namespace(match.group('namespace'))
4574
4575 - def parse_id(self, value, field_type):
4576 return value
4577
4578 - def create_table(self,table,migrate=True,fake_migrate=False, polymodel=None):
4579 myfields = {}
4580 for field in table:
4581 if isinstance(polymodel,Table) and field.name in polymodel.fields():
4582 continue
4583 attr = {}
4584 if isinstance(field.custom_qualifier, dict):
4585 # custom properties to add to the GAE field declaration
4586 attr = field.custom_qualifier
4587 field_type = field.type
4588 if isinstance(field_type, SQLCustomType):
4589 ftype = self.types[field_type.native or field_type.type](**attr)
4590 elif isinstance(field_type, gae.Property):
4591 ftype = field_type
4592 elif field_type.startswith('id'):
4593 continue
4594 elif field_type.startswith('decimal'):
4595 precision, scale = field_type[7:].strip('()').split(',')
4596 precision = int(precision)
4597 scale = int(scale)
4598 ftype = GAEDecimalProperty(precision, scale, **attr)
4599 elif field_type.startswith('reference'):
4600 if field.notnull:
4601 attr = dict(required=True)
4602 referenced = field_type[10:].strip()
4603 ftype = self.types[field_type[:9]](referenced, **attr)
4604 elif field_type.startswith('list:reference'):
4605 if field.notnull:
4606 attr['required'] = True
4607 referenced = field_type[15:].strip()
4608 ftype = self.types[field_type[:14]](**attr)
4609 elif field_type.startswith('list:'):
4610 ftype = self.types[field_type](**attr)
4611 elif not field_type in self.types\
4612 or not self.types[field_type]:
4613 raise SyntaxError('Field: unknown field type: %s' % field_type)
4614 else:
4615 ftype = self.types[field_type](**attr)
4616 myfields[field.name] = ftype
4617 if not polymodel:
4618 table._tableobj = classobj(table._tablename, (gae.Model, ), myfields)
4619 elif polymodel==True:
4620 table._tableobj = classobj(table._tablename, (PolyModel, ), myfields)
4621 elif isinstance(polymodel,Table):
4622 table._tableobj = classobj(table._tablename, (polymodel._tableobj, ), myfields)
4623 else:
4624 raise SyntaxError("polymodel must be None, True, a table or a tablename")
4625 return None
4626
4627 - def expand(self,expression,field_type=None):
4628 if isinstance(expression,Field):
4629 if expression.type in ('text', 'blob', 'json'):
4630 raise SyntaxError('AppEngine does not index by: %s' % expression.type)
4631 return expression.name
4632 elif isinstance(expression, (Expression, Query)):
4633 if not expression.second is None:
4634 return expression.op(expression.first, expression.second)
4635 elif not expression.first is None:
4636 return expression.op(expression.first)
4637 else:
4638 return expression.op()
4639 elif field_type:
4640 return self.represent(expression,field_type)
4641 elif isinstance(expression,(list,tuple)):
4642 return ','.join([self.represent(item,field_type) for item in expression])
4643 else:
4644 return str(expression)
4645
4646
4647 - def AND(self,first,second):
4648 a = self.expand(first)
4649 b = self.expand(second)
4650 if b[0].name == '__key__' and a[0].name != '__key__':
4651 return b + a
4652 return a + b
4653
4654 - def EQ(self,first,second=None):
4655 if isinstance(second, Key):
4656 return [GAEF(first.name,'=',second,lambda a,b:a==b)]
4657 return [GAEF(first.name,'=',self.represent(second,first.type),lambda a,b:a==b)]
4658
4659 - def NE(self,first,second=None):
4660 if first.type != 'id':
4661 return [GAEF(first.name,'!=',self.represent(second,first.type),lambda a,b:a!=b)]
4662 else:
4663 if not second is None:
4664 second = Key.from_path(first._tablename, long(second))
4665 return [GAEF(first.name,'!=',second,lambda a,b:a!=b)]
4666
4667 - def LT(self,first,second=None):
4668 if first.type != 'id':
4669 return [GAEF(first.name,'<',self.represent(second,first.type),lambda a,b:a<b)]
4670 else:
4671 second = Key.from_path(first._tablename, long(second))
4672 return [GAEF(first.name,'<',second,lambda a,b:a<b)]
4673
4674 - def LE(self,first,second=None):
4675 if first.type != 'id':
4676 return [GAEF(first.name,'<=',self.represent(second,first.type),lambda a,b:a<=b)]
4677 else:
4678 second = Key.from_path(first._tablename, long(second))
4679 return [GAEF(first.name,'<=',second,lambda a,b:a<=b)]
4680
4681 - def GT(self,first,second=None):
4682 if first.type != 'id' or second==0 or second == '0':
4683 return [GAEF(first.name,'>',self.represent(second,first.type),lambda a,b:a>b)]
4684 else:
4685 second = Key.from_path(first._tablename, long(second))
4686 return [GAEF(first.name,'>',second,lambda a,b:a>b)]
4687
4688 - def GE(self,first,second=None):
4689 if first.type != 'id':
4690 return [GAEF(first.name,'>=',self.represent(second,first.type),lambda a,b:a>=b)]
4691 else:
4692 second = Key.from_path(first._tablename, long(second))
4693 return [GAEF(first.name,'>=',second,lambda a,b:a>=b)]
4694
4697
4698 - def COMMA(self,first,second):
4699 return self.AND(first, second)
4700
4701 - def BELONGS(self,first,second=None):
4702 if not isinstance(second,(list, tuple)):
4703 raise SyntaxError("Not supported")
4704 if first.type != 'id':
4705 return [GAEF(first.name,'in',self.represent(second,first.type),lambda a,b:a in b)]
4706 else:
4707 second = [Key.from_path(first._tablename, int(i)) for i in second]
4708 return [GAEF(first.name,'in',second,lambda a,b:a in b)]
4709
4710 - def CONTAINS(self,first,second,case_sensitive=False):
4711 # case_sensitive is silently ignored: GAE matches are case sensitive
4712 if not first.type.startswith('list:'):
4713 raise SyntaxError("Not supported")
4714 return [GAEF(first.name, '=', self.expand(second, first.type[5:]), lambda a, b: b in a)]
4715
4716 - def NOT(self,first):
4717 nops = { self.EQ: self.NE,
4718 self.NE: self.EQ,
4719 self.LT: self.GE,
4720 self.GT: self.LE,
4721 self.LE: self.GT,
4722 self.GE: self.LT}
4723 if not isinstance(first,Query):
4724 raise SyntaxError("Not supported")
4725 nop = nops.get(first.op,None)
4726 if not nop:
4727 raise SyntaxError("Not supported %s" % first.op.__name__)
4728 first.op = nop
4729 return self.expand(first)
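# Example: negation is rewritten with the opposite operator, so
# ~(db.person.age > 18) is expanded as if it were db.person.age <= 18.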
4730
4731 - def truncate(self, table, mode):
4732 self.db(self.db._adapter.id_query(table)).delete()
4733
4734 - def select_raw(self,query,fields=None,attributes=None):
4735 db = self.db
4736 fields = fields or []
4737 attributes = attributes or {}
4738 args_get = attributes.get
4739 new_fields = []
4740 for item in fields:
4741 if isinstance(item,SQLALL):
4742 new_fields += item._table
4743 else:
4744 new_fields.append(item)
4745 fields = new_fields
4746 if query:
4747 tablename = self.get_table(query)
4748 elif fields:
4749 tablename = fields[0].tablename
4750 query = db._adapter.id_query(fields[0].table)
4751 else:
4752 raise SyntaxError("Unable to determine a tablename")
4753
4754 if query:
4755 if use_common_filters(query):
4756 query = self.common_filter(query,[tablename])
4757
4758
4759 tableobj = db[tablename]._tableobj
4760 filters = self.expand(query)
4761
4762 projection = None
4763 if len(db[tablename].fields) == len(fields):
4764 # all fields are requested: no projection needed
4765 projection = None
4766 elif args_get('projection') == True:
4767 projection = []
4768 for f in fields:
4769 if f.type in ['text', 'blob', 'json']:
4770 raise SyntaxError(
4771 "text and blob field types not allowed in projection queries")
4772 else:
4773 projection.append(f.name)
4774 elif args_get('filterfields') == True:
4775 projection = []
4776 for f in fields:
4777 projection.append(f.name)
4778
4779
4780
4781 query_projection = [
4782 p for p in projection if \
4783 p != db[tablename]._id.name] if projection and \
4784 args_get('projection') == True\
4785 else None
4786
4787 cursor = None
4788 if isinstance(args_get('reusecursor'), str):
4789 cursor = args_get('reusecursor')
4790 items = gae.Query(tableobj, projection=query_projection,
4791 cursor=cursor)
4792
4793 for filter in filters:
4794 if args_get('projection') == True and \
4795 filter.name in query_projection and \
4796 filter.op in ['=', '<=', '>=']:
4797 raise SyntaxError(
4798 "projection fields cannot have equality filters")
4799 if filter.name=='__key__' and filter.op=='>' and filter.value==0:
4800 continue
4801 elif filter.name=='__key__' and filter.op=='=':
4802 if filter.value==0:
4803 items = []
4804 elif isinstance(filter.value, Key):
4805
4806
4807
4808 item = tableobj.get(filter.value)
4809 items = (item and [item]) or []
4810 else:
4811
4812
4813
4814 item = tableobj.get_by_id(filter.value)
4815 items = (item and [item]) or []
4816 elif isinstance(items,list):
4817 items = [i for i in items if filter.apply(
4818 getattr(i, filter.name), filter.value)]
4819 else:
4820 if filter.name=='__key__' and filter.op != 'in':
4821 items.order('__key__')
4822 items = items.filter('%s %s' % (filter.name,filter.op),
4823 filter.value)
4824 if not isinstance(items,list):
4825 if args_get('left', None):
4826 raise SyntaxError('Set: no left join in appengine')
4827 if args_get('groupby', None):
4828 raise SyntaxError('Set: no groupby in appengine')
4829 orderby = args_get('orderby', False)
4830 if orderby:
4831
4832 if isinstance(orderby, (list, tuple)):
4833 orderby = xorify(orderby)
4834 if isinstance(orderby,Expression):
4835 orderby = self.expand(orderby)
4836 orders = orderby.split(', ')
4837 for order in orders:
4838 order={'-id':'-__key__','id':'__key__'}.get(order,order)
4839 items = items.order(order)
4840 if args_get('limitby', None):
4841 (lmin, lmax) = attributes['limitby']
4842 (limit, offset) = (lmax - lmin, lmin)
4843 rows = items.fetch(limit,offset=offset)
4844
4845
4846 if args_get('reusecursor'):
4847 db['_lastcursor'] = items.cursor()
4848 items = rows
4849 return (items, tablename, projection or db[tablename].fields)
4850
4851 - def select(self,query,fields,attributes):
4852 """
4853 This is the GAE version of select. Some notes to consider:
4854 - db['_lastsql'] is not set because there is no SQL statement string
4855 for a GAE query
4856 - 'nativeRef' is a magical fieldname used for self references on GAE
4857 - optional attribute 'projection' when set to True will trigger
4858 use of the GAE projection queries. note that there are rules for
4859 what is accepted imposed by GAE: each field must be indexed,
4860 projection queries cannot contain blob or text fields, and you
4861 cannot use == and also select that same field. see https://developers.google.com/appengine/docs/python/datastore/queries#Query_Projection
4862 - optional attribute 'filterfields' when set to True web2py will only
4863 parse the explicitly listed fields into the Rows object, even though
4864 all fields are returned in the query. This can be used to reduce
4865 memory usage in cases where true projection queries are not
4866 usable.
4867 - optional attribute 'reusecursor' allows use of cursor with queries
4868 that have the limitby attribute. Set the attribute to True for the
4869 first query, set it to the value of db['_lastcursor'] to continue
4870 a previous query. The user must save the cursor value between
4871 requests, and the filters must be identical. It is up to the user
4872 to follow google's limitations: https://developers.google.com/appengine/docs/python/datastore/queries#Query_Cursors
4873 """
4874
4875 (items, tablename, fields) = self.select_raw(query,fields,attributes)
4876
4877 rows = [[(t==self.db[tablename]._id.name and item) or \
4878 (t=='nativeRef' and item) or getattr(item, t) \
4879 for t in fields] for item in items]
4880 colnames = ['%s.%s' % (tablename, t) for t in fields]
4881 processor = attributes.get('processor',self.parse)
4882 return processor(rows,fields,colnames,False)
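# Usage sketch of the GAE-specific select attributes (names are
# hypothetical):
#
#   rows = db(db.thing.price > 0).select(
#       db.thing.id, db.thing.price, projection=True)
#   page1 = db(query).select(limitby=(0, 10), reusecursor=True)
#   cursor = db['_lastcursor']   # save this between requests
#   page2 = db(query).select(limitby=(0, 10), reusecursor=cursor)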
4883
4884 - def count(self,query,distinct=None,limit=None):
4885 if distinct:
4886 raise RuntimeError("COUNT DISTINCT not supported")
4887 (items, tablename, fields) = self.select_raw(query)
4888
4889 try:
4890 return len(items)
4891 except TypeError:
4892 return items.count(limit=limit)
4893
4894 - def delete(self,tablename, query):
4895 """
4896 This function was changed on 2010-05-04 because according to
4897 http://code.google.com/p/googleappengine/issues/detail?id=3119
4898 GAE no longer supports deleting more than 1000 records.
4899 """
4900
4901 (items, tablename, fields) = self.select_raw(query)
4902
4903 if not isinstance(items,list):
4904
4905
4906 leftitems = items.fetch(1000, keys_only=True)
4907 counter = 0
4908 while len(leftitems):
4909 counter += len(leftitems)
4910 gae.delete(leftitems)
4911 leftitems = items.fetch(1000, keys_only=True)
4912 else:
4913 counter = len(items)
4914 gae.delete(items)
4915 return counter
4916
4917 - def update(self,tablename,query,update_fields):
4918
4919 (items, tablename, fields) = self.select_raw(query)
4920 counter = 0
4921 for item in items:
4922 for field, value in update_fields:
4923 setattr(item, field.name, self.represent(value,field.type))
4924 item.put()
4925 counter += 1
4926 LOGGER.info(str(counter))
4927 return counter
4928
4929 - def insert(self,table,fields):
4930 dfields=dict((f.name,self.represent(v,f.type)) for f,v in fields)
4931
4932 tmp = table._tableobj(**dfields)
4933 tmp.put()
4934 rid = Reference(tmp.key().id())
4935 (rid._table, rid._record, rid._gaekey) = (table, None, tmp.key())
4936 return rid
4937
4938 - def bulk_insert(self, table, items):
4939 parsed_items = []
4940 for item in items:
4941 dfields=dict((f.name,self.represent(v,f.type)) for f,v in item)
4942 parsed_items.append(table._tableobj(**dfields))
4943 gae.put(parsed_items)
4944 return True
4945
4946 - def uuid2int(uuidv):
4947 return uuid.UUID(uuidv).int
4948
4949 - def int2uuid(n):
4950 return str(uuid.UUID(int=n))
4951
4952 -class CouchDBAdapter(NoSQLAdapter):
4953 drivers = ('couchdb',)
4954
4955 uploads_in_blob = True
4956 types = {
4957 'boolean': bool,
4958 'string': str,
4959 'text': str,
4960 'json': str,
4961 'password': str,
4962 'blob': str,
4963 'upload': str,
4964 'integer': long,
4965 'bigint': long,
4966 'float': float,
4967 'double': float,
4968 'date': datetime.date,
4969 'time': datetime.time,
4970 'datetime': datetime.datetime,
4971 'id': long,
4972 'reference': long,
4973 'list:string': list,
4974 'list:integer': list,
4975 'list:reference': list,
4976 }
4977
4978 - def file_exists(self, filename): pass
4979 - def file_open(self, filename, mode='rb', lock=True): pass
4980 - def file_close(self, fileobj): pass
4982 - def expand(self,expression,field_type=None):
4983 if isinstance(expression,Field):
4984 if expression.type=='id':
4985 return "%s._id" % expression.tablename
4986 return BaseAdapter.expand(self,expression,field_type)
4987
4988 - def AND(self,first,second):
4989 return '(%s && %s)' % (self.expand(first), self.expand(second))
4990
4991 - def OR(self,first,second):
4992 return '(%s || %s)' % (self.expand(first), self.expand(second))
4993
4994 - def EQ(self,first,second):
4995 if second is None:
4996 return '(%s == null)' % self.expand(first)
4997 return '(%s == %s)' % (self.expand(first), self.expand(second, first.type))
4998
4999 - def NE(self,first,second):
5000 if second is None:
5001 return '(%s != null)' % self.expand(first)
5002 return '(%s != %s)' % (self.expand(first), self.expand(second, first.type))
5003
5004 - def COMMA(self,first,second):
5005 return '%s + %s' % (self.expand(first), self.expand(second))
5006
5007 - def represent(self, obj, fieldtype):
5008 value = NoSQLAdapter.represent(self, obj, fieldtype)
5009 if fieldtype=='id':
5010 return repr(str(long(value)))
5011 elif fieldtype in ('date','time','datetime','boolean'):
5012 return serializers.json(value)
5013 return repr(not isinstance(value,unicode) and value \
5014 or value and value.encode('utf8'))
5015
5016 - def __init__(self,db,uri='couchdb://127.0.0.1:5984',
5017 pool_size=0,folder=None,db_codec ='UTF-8',
5018 credential_decoder=IDENTITY, driver_args={},
5019 adapter_args={}, do_connect=True, after_connection=None):
5020 self.db = db
5021 self.uri = uri
5022 if do_connect: self.find_driver(adapter_args)
5023 self.dbengine = 'couchdb'
5024 self.folder = folder
5025 db['_lastsql'] = ''
5026 self.db_codec = 'UTF-8'
5027 self._after_connection = after_connection
5028 self.pool_size = pool_size
5029
5030 url='http://'+uri[10:]
5031 def connector(url=url,driver_args=driver_args):
5032 return self.driver.Server(url,**driver_args)
5033 self.reconnect(connector,cursor=False)
5034
5035 - def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
5036 if migrate:
5037 try:
5038 self.connection.create(table._tablename)
5039 except:
5040 pass
5041
5042 - def insert(self,table,fields):
5043 id = uuid2int(web2py_uuid())
5044 ctable = self.connection[table._tablename]
5045 values = dict((k.name, self.represent(v, k.type)) for k, v in fields)
5046 values['_id'] = str(id)
5047 ctable.save(values)
5048 return id
5049
5050 - def _select(self,query,fields,attributes):
5051 if not isinstance(query,Query):
5052 raise SyntaxError("Not Supported")
5053 for key in set(attributes.keys())-SELECT_ARGS:
5054 raise SyntaxError('invalid select attribute: %s' % key)
5055 new_fields=[]
5056 for item in fields:
5057 if isinstance(item,SQLALL):
5058 new_fields += item._table
5059 else:
5060 new_fields.append(item)
5061 def uid(fd):
5062 return fd=='id' and '_id' or fd
5063 def get(row,fd):
5064 return fd=='id' and long(row['_id']) or row.get(fd,None)
5065 fields = new_fields
5066 tablename = self.get_table(query)
5067 fieldnames = [f.name for f in (fields or self.db[tablename])]
5068 colnames = ['%s.%s' % (tablename,k) for k in fieldnames]
5069 fields = ','.join(['%s.%s' % (tablename,uid(f)) for f in fieldnames])
5070 fn="(function(%(t)s){if(%(query)s)emit(%(order)s,[%(fields)s]);})" %\
5071 dict(t=tablename,
5072 query=self.expand(query),
5073 order='%s._id' % tablename,
5074 fields=fields)
5075 return fn, colnames
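# For example, db(db.person.name == 'James')._select(...) with the id
# and name fields produces roughly this CouchDB map function:
#
#   (function(person){
#       if((person.name == 'James'))
#           emit(person._id, [person._id, person.name]);
#   })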
5076
5077 - def select(self,query,fields,attributes):
5078 if not isinstance(query,Query):
5079 raise SyntaxError("Not Supported")
5080 fn, colnames = self._select(query,fields,attributes)
5081 tablename = colnames[0].split('.')[0]
5082 ctable = self.connection[tablename]
5083 rows = [cols['value'] for cols in ctable.query(fn)]
5084 processor = attributes.get('processor',self.parse)
5085 return processor(rows,fields,colnames,False)
5086
5087 - def delete(self,tablename,query):
5088 if not isinstance(query,Query):
5089 raise SyntaxError("Not Supported")
5090 if query.first.type=='id' and query.op==self.EQ:
5091 id = query.second
5092 tablename = query.first.tablename
5093 assert(tablename == query.first.tablename)
5094 ctable = self.connection[tablename]
5095 try:
5096 del ctable[str(id)]
5097 return 1
5098 except couchdb.http.ResourceNotFound:
5099 return 0
5100 else:
5101 tablename = self.get_table(query)
5102 rows = self.select(query,[self.db[tablename]._id],{})
5103 ctable = self.connection[tablename]
5104 for row in rows:
5105 del ctable[str(row.id)]
5106 return len(rows)
5107
5108 - def update(self,tablename,query,fields):
5109 if not isinstance(query,Query):
5110 raise SyntaxError("Not Supported")
5111 if query.first.type=='id' and query.op==self.EQ:
5112 id = query.second
5113 tablename = query.first.tablename
5114 ctable = self.connection[tablename]
5115 try:
5116 doc = ctable[str(id)]
5117 for key,value in fields:
5118 doc[key.name] = self.represent(value,self.db[tablename][key.name].type)
5119 ctable.save(doc)
5120 return 1
5121 except couchdb.http.ResourceNotFound:
5122 return 0
5123 else:
5124 tablename = self.get_table(query)
5125 rows = self.select(query,[self.db[tablename]._id],{})
5126 ctable = self.connection[tablename]
5127 table = self.db[tablename]
5128 for row in rows:
5129 doc = ctable[str(row.id)]
5130 for key,value in fields:
5131 doc[key.name] = self.represent(value,table[key.name].type)
5132 ctable.save(doc)
5133 return len(rows)
5134
5135 - def count(self,query,distinct=None):
5136 if distinct:
5137 raise RuntimeError("COUNT DISTINCT not supported")
5138 if not isinstance(query,Query):
5139 raise SyntaxError("Not Supported")
5140 tablename = self.get_table(query)
5141 rows = self.select(query,[self.db[tablename]._id],{})
5142 return len(rows)
5143
5145 """
5146 validates that the given text is clean: only contains [0-9a-zA-Z_]
5147 """
5148 if not REGEX_ALPHANUMERIC.match(text):
5149 raise SyntaxError('invalid table or field name: %s' % text)
5150 return text
5151
5152 -class MongoDBAdapter(NoSQLAdapter):
5153 native_json = True
5154 drivers = ('pymongo',)
5155
5156 uploads_in_blob = True
5157
5158 types = {
5159 'boolean': bool,
5160 'string': str,
5161 'text': str,
5162 'json': str,
5163 'password': str,
5164 'blob': str,
5165 'upload': str,
5166 'integer': long,
5167 'bigint': long,
5168 'float': float,
5169 'double': float,
5170 'date': datetime.date,
5171 'time': datetime.time,
5172 'datetime': datetime.datetime,
5173 'id': long,
5174 'reference': long,
5175 'list:string': list,
5176 'list:integer': list,
5177 'list:reference': list,
5178 }
5179
5180 error_messages = {"javascript_needed": "This must yet be replaced" +
5181 " with javascript in order to work."}
5182
5183 - def __init__(self,db,uri='mongodb://127.0.0.1:5984/db',
5184 pool_size=0, folder=None, db_codec ='UTF-8',
5185 credential_decoder=IDENTITY, driver_args={},
5186 adapter_args={}, do_connect=True, after_connection=None):
5187
5188 self.db = db
5189 self.uri = uri
5190 if do_connect: self.find_driver(adapter_args)
5191 import random
5192 from bson.objectid import ObjectId
5193 from bson.son import SON
5194 import pymongo.uri_parser
5195
5196 m = pymongo.uri_parser.parse_uri(uri)
5197
5198 self.SON = SON
5199 self.ObjectId = ObjectId
5200 self.random = random
5201
5202 self.dbengine = 'mongodb'
5203 self.folder = folder
5204 db['_lastsql'] = ''
5205 self.db_codec = 'UTF-8'
5206 self._after_connection = after_connection
5207 self.pool_size = pool_size
5208
5209
5210 self.minimumreplication = adapter_args.get('minimumreplication',0)
5211
5212
5213
5214
5215 self.safe = adapter_args.get('safe',True)
5216
5217 if isinstance(m,tuple):
5218 m = {"database" : m[1]}
5219 if m.get('database')==None:
5220 raise SyntaxError("Database is required!")
5221 def connector(uri=self.uri,m=m):
5222 try:
5223
5224 if hasattr(self.driver, "MongoClient"):
5225 Connection = self.driver.MongoClient
5226 else:
5227 Connection = self.driver.Connection
5228 return Connection(uri)[m.get('database')]
5229 except self.driver.errors.ConnectionFailure:
5230 inst = sys.exc_info()[1]
5231 raise SyntaxError("The connection to " +
5232 uri + " could not be made")
5233
5234 self.reconnect(connector,cursor=False)
5235
5237 """ Convert input to a valid Mongodb ObjectId instance
5238
5239 self.object_id("<random>") -> ObjectId (not unique) instance """
5240 if not arg:
5241 arg = 0
5242 if isinstance(arg, basestring):
5243
5244 rawhex = len(arg.replace("0x", "").replace("L", "")) == 24
5245 if arg.isdigit() and (not rawhex):
5246 arg = int(arg)
5247 elif arg == "<random>":
5248 arg = int("0x%sL" % \
5249 "".join([self.random.choice("0123456789abcdef") \
5250 for x in range(24)]), 0)
5251 elif arg.isalnum():
5252 if not arg.startswith("0x"):
5253 arg = "0x%s" % arg
5254 try:
5255 arg = int(arg, 0)
5256 except ValueError, e:
5257 raise ValueError(
5258 "invalid objectid argument string: %s" % e)
5259 else:
5260 raise ValueError("Invalid objectid argument string. " +
5261 "Requires an integer or base 16 value")
5262 elif isinstance(arg, self.ObjectId):
5263 return arg
5264
5265 if not isinstance(arg, (int, long)):
5266 raise TypeError("object_id argument must be of type " +
5267 "ObjectId or an objectid representable integer")
5268 # ObjectId requires exactly 24 hex digits; pad the value
5269 hexvalue = hex(arg)[2:].replace("L", "").zfill(24)
5272 return self.ObjectId(hexvalue)
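# Usage sketch, assuming the zero-padded hex form above:
#
#   >>> adapter.object_id(7)
#   ObjectId('000000000000000000000007')
#   >>> adapter.object_id('<random>')   # 24 random hex digits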
5273
5274 - def parse_reference(self, value, field_type):
5276 if isinstance(value, self.ObjectId):
5277 value = long(str(value), 16)
5278 return super(MongoDBAdapter,
5279 self).parse_reference(value, field_type)
5280
5281 - def parse_id(self, value, field_type):
5282 if isinstance(value, self.ObjectId):
5283 value = long(str(value), 16)
5284 return super(MongoDBAdapter,
5285 self).parse_id(value, field_type)
5286
5287 - def represent(self, obj, fieldtype):
5289 if isinstance(obj, self.ObjectId):
5290 value = obj
5291 else:
5292 value = NoSQLAdapter.represent(self, obj, fieldtype)
5293
5294 if fieldtype =='date':
5295 if value == None:
5296 return value
5297
5298 t = datetime.time(0, 0, 0)
5299
5300
5301 return datetime.datetime.combine(value, t)
5302 elif fieldtype == 'time':
5303 if value == None:
5304 return value
5305
5306 d = datetime.date(2000, 1, 1)
5307
5308
5309 return datetime.datetime.combine(d, value)
5310 elif (isinstance(fieldtype, basestring) and
5311 fieldtype.startswith('list:')):
5312 if fieldtype.startswith('list:reference'):
5313 newval = []
5314 for v in value:
5315 newval.append(self.object_id(v))
5316 return newval
5317 return value
5318 elif ((isinstance(fieldtype, basestring) and
5319 fieldtype.startswith("reference")) or
5320 (isinstance(fieldtype, Table))):
5321 value = self.object_id(value)
5322
5323 return value
5324
5325
5326
5327
5328 - def insert(self, table, fields, safe=None):
5329 if safe==None:
5330 safe = self.safe
5331 ctable = self.connection[table._tablename]
5332 values = dict()
5333 for k, v in fields:
5334 if not k.name in ["id", "safe"]:
5335 fieldname = k.name
5336 fieldtype = table[k.name].type
5337 if ("reference" in fieldtype) or (fieldtype=="id"):
5338 values[fieldname] = self.object_id(v)
5339 else:
5340 values[fieldname] = self.represent(v, fieldtype)
5341 ctable.insert(values, safe=safe)
5342 return long(str(values['_id']), 16)
5343
5344 - def create_table(self, table, migrate=True, fake_migrate=False,
5345 polymodel=None, isCapped=False):
5346 if isCapped:
5347 raise RuntimeError("Not implemented")
5348
5349 - def count(self, query, distinct=None, snapshot=True):
5350 if distinct:
5351 raise RuntimeError("COUNT DISTINCT not supported")
5352 if not isinstance(query,Query):
5353 raise SyntaxError("Not Supported")
5354 tablename = self.get_table(query)
5355 return long(self.select(query,[self.db[tablename]._id], {},
5356 count=True,snapshot=snapshot)['count'])
5357
5358
5359
5360
5361
5362 - def expand(self, expression, field_type=None):
5363 if isinstance(expression, Query):
5364
5365
5366
5367
5368
5369 if isinstance(expression.first,Field) and \
5370 ((expression.first.type == 'id') or \
5371 ("reference" in expression.first.type)):
5372 if expression.first.type == 'id':
5373 expression.first.name = '_id'
5374
5375 if isinstance(expression.second, (tuple, list, set)):
5376 expression.second = [self.object_id(item) for
5377 item in expression.second]
5378 else:
5379 expression.second = self.object_id(expression.second)
5380 result = expression.op(expression.first, expression.second)
5381
5382 if isinstance(expression, Field):
5383 if expression.type=='id':
5384 result = "_id"
5385 else:
5386 result = expression.name
5387 elif isinstance(expression, (Expression, Query)):
5388 if not expression.second is None:
5389 result = expression.op(expression.first, expression.second)
5390 elif not expression.first is None:
5391 result = expression.op(expression.first)
5392 elif not isinstance(expression.op, str):
5393 result = expression.op()
5394 else:
5395 result = expression.op
5396 elif field_type:
5397 result = self.represent(expression,field_type)
5398 elif isinstance(expression,(list,tuple)):
5399 result = ','.join(self.represent(item,field_type) for
5400 item in expression)
5401 else:
5402 result = expression
5403 return result
5404
5405 - def _select(self, query, fields, attributes):
5406 if 'for_update' in attributes:
5407 logging.warn('mongodb does not support for_update')
5408 for key in set(attributes.keys())-set(('limitby',
5409 'orderby','for_update')):
5410 if attributes[key]!=None:
5411 logging.warn('select attribute not implemented: %s' % key)
5412
5413 new_fields=[]
5414 mongosort_list = []
5415
5416
5417 orderby = attributes.get('orderby', False)
5418 limitby = attributes.get('limitby', False)
5419
5420 if orderby:
5421 if isinstance(orderby, (list, tuple)):
5422 orderby = xorify(orderby)
5423
5424
5425 for f in self.expand(orderby).split(','):
5426 if f.startswith('-'):
5427 mongosort_list.append((f[1:], -1))
5428 else:
5429 mongosort_list.append((f, 1))
5430
5431 if limitby:
5432 limitby_skip, limitby_limit = limitby
5433 else:
5434 limitby_skip = limitby_limit = 0
5435
5436 mongofields_dict = self.SON()
5437 mongoqry_dict = {}
5438 for item in fields:
5439 if isinstance(item, SQLALL):
5440 new_fields += item._table
5441 else:
5442 new_fields.append(item)
5443 fields = new_fields
5444 if isinstance(query,Query):
5445 tablename = self.get_table(query)
5446 elif len(fields) != 0:
5447 tablename = fields[0].tablename
5448 else:
5449 raise SyntaxError("The table name could not be found in " +
5450 "the query nor from the select statement.")
5451 mongoqry_dict = self.expand(query)
5452 fields = fields or self.db[tablename]
5453 for field in fields:
5454 mongofields_dict[field.name] = 1
5455
5456 return tablename, mongoqry_dict, mongofields_dict, mongosort_list, \
5457 limitby_limit, limitby_skip
5458
5459
5460 - def select(self, query, fields, attributes, count=False,
5461 snapshot=False):
5462
5463 tablename, mongoqry_dict, mongofields_dict, mongosort_list, \
5464 limitby_limit, limitby_skip = self._select(query, fields, attributes)
5465 ctable = self.connection[tablename]
5466
5467 if count:
5468 return {'count' : ctable.find(
5469 mongoqry_dict, mongofields_dict,
5470 skip=limitby_skip, limit=limitby_limit,
5471 sort=mongosort_list, snapshot=snapshot).count()}
5472 else:
5473
5474 mongo_list_dicts = ctable.find(mongoqry_dict,
5475 mongofields_dict, skip=limitby_skip,
5476 limit=limitby_limit, sort=mongosort_list,
5477 snapshot=snapshot)
5478 rows = []
5479
5480
5481 colnames = []
5482 newnames = []
5483 for field in fields:
5484 colname = str(field)
5485 colnames.append(colname)
5486 tablename, fieldname = colname.split(".")
5487 if fieldname == "_id":
5488
5489 field.name = "id"
5490 newnames.append(".".join((tablename, field.name)))
5491
5492 for record in mongo_list_dicts:
5493 row=[]
5494 for colname in colnames:
5495 tablename, fieldname = colname.split(".")
5496
5497
5498 if fieldname == "id": fieldname = "_id"
5499 if fieldname in record:
5500 value = record[fieldname]
5501 else:
5502 value = None
5503 row.append(value)
5504 rows.append(row)
5505
5506 processor = attributes.get('processor', self.parse)
5507 result = processor(rows, fields, newnames, False)
5508 return result
5509
5510
5514
5515 - def drop(self, table, mode=''):
5516 ctable = self.connection[table._tablename]
5517 ctable.drop()
5518
5519
5520 - def truncate(self, table, mode, safe=None):
5521 if safe == None:
5522 safe=self.safe
5523 ctable = self.connection[table._tablename]
5524 ctable.remove(None, safe=safe)
5525
5526 - def oupdate(self, tablename, query, fields):
5527 if not isinstance(query, Query):
5528 raise SyntaxError("Not Supported")
5529 filter = None
5530 if query:
5531 filter = self.expand(query)
5532 modify = {'$set': dict((k.name, self.represent(v, k.type)) for
5533 k, v in fields)}
5534 return modify, filter
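# Example: for db(db.person.name == 'James').update(name='Jim') this
# returns roughly modify={'$set': {'name': u'Jim'}} and
# filter={'name': 'James'}, which update() below passes to pymongo's
# collection.update(filter, modify, multi=True).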
5535
5536 - def update(self, tablename, query, fields, safe=None):
5537 if safe == None:
5538 safe = self.safe
5539
5540
5541 if not isinstance(query, Query):
5542 raise RuntimeError("Not implemented")
5543 amount = self.count(query, False)
5544 modify, filter = self.oupdate(tablename, query, fields)
5545 try:
5546 result = self.connection[tablename].update(filter,
5547 modify, multi=True, safe=safe)
5548 if safe:
5549 try:
5550
5551 return result["n"]
5552 except (KeyError, AttributeError, TypeError):
5553 return amount
5554 else:
5555 return amount
5556 except Exception, e:
5557
5558 raise RuntimeError("uncaught exception when updating rows: %s" % e)
5559
5560
5561 - def _update(self,tablename,query,fields):
5562 return str(self.oupdate(tablename, query, fields))
5563
5564 - def delete(self, tablename, query, safe=None):
5565 if safe is None:
5566 safe = self.safe
5567 amount = 0
5568 amount = self.count(query, False)
5569 if not isinstance(query, Query):
5570 raise RuntimeError("query type %s is not supported" % \
5571 type(query))
5572 filter = self.expand(query)
5573 self._delete(tablename, filter, safe=safe)
5574 return amount
5575
5576 - def _delete(self, tablename, filter, safe=None):
5577 return self.connection[tablename].remove(filter, safe=safe)
5578
5579 - def bulk_insert(self, table, items):
5580 return [self.insert(table, item) for item in items]
5581
5582
5583 - def NOT(self, first):
5584 result = {}
5585 result["$not"] = self.expand(first)
5586 return result
5587
5588 - def AND(self,first,second):
5589 f = self.expand(first)
5590 s = self.expand(second)
5591 f.update(s)
5592 return f
5593
5594 - def OR(self,first,second):
5595
5596 result = {}
5597 f = self.expand(first)
5598 s = self.expand(second)
5599 result['$or'] = [f,s]
5600 return result
5601
5602 - def BELONGS(self, first, second):
5603 if isinstance(second, str):
5604 return {self.expand(first) : {"$in" : [ second[:-1]]} }
5605 elif second==[] or second==() or second==set():
5606 return {1:0}
5607 items = [self.expand(item, first.type) for item in second]
5608 return {self.expand(first) : {"$in" : items} }
5609
5610 - def EQ(self,first,second):
5611 result = {}
5612 result[self.expand(first)] = self.expand(second)
5613 return result
5614
5615 - def NE(self, first, second=None):
5616 result = {}
5617 result[self.expand(first)] = {'$ne': self.expand(second)}
5618 return result
5619
5620 - def LT(self,first,second=None):
5621 if second is None:
5622 raise RuntimeError("Cannot compare %s < None" % first)
5623 result = {}
5624 result[self.expand(first)] = {'$lt': self.expand(second)}
5625 return result
5626
5627 - def LE(self,first,second=None):
5628 if second is None:
5629 raise RuntimeError("Cannot compare %s <= None" % first)
5630 result = {}
5631 result[self.expand(first)] = {'$lte': self.expand(second)}
5632 return result
5633
5634 - def GT(self,first,second):
5635 result = {}
5636 result[self.expand(first)] = {'$gt': self.expand(second)}
5637 return result
5638
5639 - def GE(self,first,second=None):
5640 if second is None:
5641 raise RuntimeError("Cannot compare %s >= None" % first)
5642 result = {}
5643 result[self.expand(first)] = {'$gte': self.expand(second)}
5644 return result
5645
5646 - def ADD(self, first, second):
5647 raise NotImplementedError(self.error_messages["javascript_needed"])
5648 return '%s + %s' % (self.expand(first), self.expand(second, first.type))
5650
5651 - def SUB(self, first, second):
5652 raise NotImplementedError(self.error_messages["javascript_needed"])
5653 return '%s - %s' % (self.expand(first), self.expand(second, first.type))
5655
5656 - def MUL(self, first, second):
5657 raise NotImplementedError(self.error_messages["javascript_needed"])
5658 return '%s * %s' % (self.expand(first), self.expand(second, first.type))
5660
5661 - def DIV(self, first, second):
5662 raise NotImplementedError(self.error_messages["javascript_needed"])
5663 return '%s / %s' % (self.expand(first), self.expand(second, first.type))
5665
5666 - def MOD(self, first, second):
5667 raise NotImplementedError(self.error_messages["javascript_needed"])
5668 return '%s %% %s' % (self.expand(first), self.expand(second, first.type))
5670
5671 - def AS(self, first, second):
5672 raise NotImplementedError(self.error_messages["javascript_needed"])
5673 return '%s AS %s' % (self.expand(first), second)
5674
5675
5676
5677
5678 - def ON(self, first, second):
5679 raise NotImplementedError("This is not possible in NoSQL" +
5680 " but can be simulated with a wrapper.")
5681 return '%s ON %s' % (self.expand(first), self.expand(second))
5682
5683
5684
5685
5686 - def COMMA(self, first, second):
5688
5689 - def LIKE(self, first, second):
5690 # translate SQL LIKE '%' wildcards into an (unescaped) $regex match
5691 import re
5692 return {self.expand(first): {'$regex': \
5693 self.expand(second, 'string').replace('%', '.*')}}
5694
5718 - def STARTSWITH(self, first, second):
5719 # anchor the escaped pattern at the start of the value
5720 import re
5721 return {self.expand(first): {'$regex' : '^' +
5722 re.escape(self.expand(second,
5723 'string'))}}
5724
5726 - def ENDSWITH(self, first, second):
5727 # anchor the escaped pattern at the end of the value
5731 import re
5732 return {self.expand(first): {'$regex': \
5733 re.escape(self.expand(second, 'string')) + '$'}}
5734
5736 - def CONTAINS(self, first, second, case_sensitive=False):
5737 # case_sensitive is silently ignored: mongodb regex matches are
5738 # case sensitive and ObjectIds cannot be matched case-insensitively
5740 import re
5741 return {self.expand(first): {'$regex': \
5742 ".*" + re.escape(self.expand(second, 'string')) + ".*"}}
5743
5745 -class IMAPAdapter(NoSQLAdapter):
5746 drivers = ('imaplib',)
5747
5748 """ IMAP server adapter
5749
5750 This class is intended as an interface with
5751 email IMAP servers to perform simple queries in the
5752 web2py DAL query syntax, so email read, search and
5753 other related IMAP mail services (such as those implemented
5754 by brands like Google(r) and Yahoo!(r))
5755 can be managed from web2py applications.
5756
5757 The code uses examples by Yuji Tomita on this post:
5758 http://yuji.wordpress.com/2011/06/22/python-imaplib-imap-example-with-gmail/#comment-1137
5759 and is based on the docs for Python imaplib, Python email
5760 and the email IETF RFCs (i.e. RFC 2060 and RFC 3501)
5761
5762 This adapter was tested with a small set of operations with Gmail(r).
5763 Requests to other services could raise command syntax and response data issues.
5764
5765 It creates its table and field names "statically",
5766 meaning that the developer should leave the table and field
5767 definitions to the DAL instance by calling the adapter's
5768 .define_tables() method. The tables are defined with the
5769 IMAP server mailbox list information.
5770
5771 .define_tables() returns a dictionary mapping dal tablenames
5772 to the server mailbox names with the following structure:
5773
5774 {<tablename>: str <server mailbox name>}
5775
5776 Here is a list of supported fields:
5777
5778 Field Type Description
5779 ################################################################
5780 uid string
5781 answered boolean Flag
5782 created date
5783 content list:string A list of text or html parts
5784 to string
5785 cc string
5786 bcc string
5787 size integer the number of octets of the message*
5788 deleted boolean Flag
5789 draft boolean Flag
5790 flagged boolean Flag
5791 sender string
5792 recent boolean Flag
5793 seen boolean Flag
5794 subject string
5795 mime string The mime header declaration
5796 email string The complete RFC822 message**
5797 attachments <type list> Each non-text part as a dict
5798 encoding string The main detected encoding
5799
5800 *At the application side it is measured as the length of the RFC822
5801 message string
5802
5803 WARNING: As row ids are mapped to email sequence numbers,
5804 make sure your imap client web2py app does not delete messages
5805 during select or update actions, to prevent
5806 updating or deleting different messages.
5807 Sequence numbers change whenever the mailbox is updated.
5808 To avoid these sequence number issues, the use of uid fields
5809 in query references is recommended (although the update and
5810 delete in separate actions rule still applies).
5811
5812 # This is the code recommended to start imap support
5813 # at the app's model:
5814
5815 imapdb = DAL("imap://user:password@server:port", pool_size=1) # port 993 for ssl
5816 imapdb.define_tables()
5817
5818 Here is an (incomplete) list of possible imap commands:
5819
5820 # Count today's unseen messages
5821 # smaller than 6000 octets from the
5822 # inbox mailbox
5823
5824 q = imapdb.INBOX.seen == False
5825 q &= imapdb.INBOX.created == datetime.date.today()
5826 q &= imapdb.INBOX.size < 6000
5827 unread = imapdb(q).count()
5828
5829 # Fetch last query messages
5830 rows = imapdb(q).select()
5831
5832 # it is also possible to filter query select results with limitby and
5833 # sequences of mailbox fields
5834
5835 set.select(<fields sequence>, limitby=(<int>, <int>))
5836
5837 # Mark last query messages as seen
5838 messages = [row.uid for row in rows]
5839 seen = imapdb(imapdb.INBOX.uid.belongs(messages)).update(seen=True)
5840
5841 # Delete messages in the imap database that have mails from mr. Gumby
5842
5843 deleted = 0
5844 for mailbox in imapdb.tables:
5845 deleted += imapdb(imapdb[mailbox].sender.contains("gumby")).delete()
5846
5847 # It is also possible to mark messages for deletion instead of erasing them
5848 # directly with set.update(deleted=True)
5849
5850
5851 # This object gives access
5852 # to the adapter's auto mailbox
5853 # mapped names (which native
5854 # mailbox has which table name)
5855
5856 imapdb.mailboxes <dict> # tablename, server native name pairs
5857
5858 # To retrieve a table native mailbox name use:
5859 imapdb.<table>.mailbox
5860
5861 ### New features v2.4.1:
5862
5863 # Declare mailboxes statically with tablename, name pairs
5864 # This avoids the extra server names retrieval
5865
5866 imapdb.define_tables({"inbox": "INBOX"})
5867
5868 # Selects without content/attachments/email columns will only
5869 # fetch header and flags
5870
5871 imapdb(q).select(imapdb.INBOX.sender, imapdb.INBOX.subject)
5872 """
5873
5874 types = {
5875 'string': str,
5876 'text': str,
5877 'date': datetime.date,
5878 'datetime': datetime.datetime,
5879 'id': long,
5880 'boolean': bool,
5881 'integer': int,
5882 'bigint': long,
5883 'blob': str,
5884 'list:string': str,
5885 }
5886
5887 dbengine = 'imap'
5888
5889 REGEX_URI = re.compile('^(?P<user>[^:]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?$')
5890
5891 - def __init__(self,
5892 db,
5893 uri,
5894 pool_size=0,
5895 folder=None,
5896 db_codec ='UTF-8',
5897 credential_decoder=IDENTITY,
5898 driver_args={},
5899 adapter_args={},
5900 do_connect=True,
5901 after_connection=None):
5902
5903
5904
5905
5906 self.db = db
5907 self.uri = uri
5908 if do_connect: self.find_driver(adapter_args)
5909 self.pool_size=pool_size
5910 self.folder = folder
5911 self.db_codec = db_codec
5912 self._after_connection = after_connection
5913 self.credential_decoder = credential_decoder
5914 self.driver_args = driver_args
5915 self.adapter_args = adapter_args
5916 self.mailbox_size = None
5917 self.static_names = None
5918 self.charset = sys.getfilesystemencoding()
5919
5920 self.imap4 = None
5921 uri = uri.split("://")[1]
5922
5923 """ MESSAGE is an identifier for sequence number"""
5924
5925 self.flags = ['\\Deleted', '\\Draft', '\\Flagged',
5926 '\\Recent', '\\Seen', '\\Answered']
5927 self.search_fields = {
5928 'id': 'MESSAGE', 'created': 'DATE',
5929 'uid': 'UID', 'sender': 'FROM',
5930 'to': 'TO', 'cc': 'CC',
5931 'bcc': 'BCC', 'content': 'TEXT',
5932 'size': 'SIZE', 'deleted': '\\Deleted',
5933 'draft': '\\Draft', 'flagged': '\\Flagged',
5934 'recent': '\\Recent', 'seen': '\\Seen',
5935 'subject': 'SUBJECT', 'answered': '\\Answered',
5936 'mime': None, 'email': None,
5937 'attachments': None
5938 }
5939
5940 db['_lastsql'] = ''
5941
5942 m = self.REGEX_URI.match(uri)
5943 user = m.group('user')
5944 password = m.group('password')
5945 host = m.group('host')
5946 port = int(m.group('port'))
5947 over_ssl = False
5948 if port==993:
5949 over_ssl = True
5950
5951 driver_args.update(host=host,port=port, password=password, user=user)
5952 def connector(driver_args=driver_args):
5953
5954
5955 if over_ssl:
5956 self.imap4 = self.driver.IMAP4_SSL
5957 else:
5958 self.imap4 = self.driver.IMAP4
5959 connection = self.imap4(driver_args["host"], driver_args["port"])
5960 data = connection.login(driver_args["user"], driver_args["password"])
5961
5962
5963 connection.mailbox_names = None
5964
5965
5966 connection.cursor = lambda : True
5967
5968 return connection
5969
5970 self.db.define_tables = self.define_tables
5971 self.connector = connector
5972 if do_connect: self.reconnect()
5973
6018
6019 def get_last_message(self, tablename):
6020 last_message = None
6021
6022
6023 if not isinstance(self.connection.mailbox_names, dict):
6024 self.get_mailboxes()
6025 try:
6026 result = self.connection.select(self.connection.mailbox_names[tablename])
6027 last_message = int(result[1][0])
6028 except (IndexError, ValueError, TypeError, KeyError):
6029 e = sys.exc_info()[1]
6030 LOGGER.debug("Error retrieving the last mailbox sequence number. %s" % str(e))
6031 return last_message
6032
6033 def get_uid_bounds(self, tablename):
6034 if not isinstance(self.connection.mailbox_names, dict):
6035 self.get_mailboxes()
6036
6037
6038 last_message = self.get_last_message(tablename)
6039 result, data = self.connection.uid("search", None, "(ALL)")
6040 uid_list = data[0].strip().split()
6041 if len(uid_list) <= 0:
6042 return None
6043 else:
6044 return (uid_list[0], uid_list[-1])
6045
6046 def convert_date(self, date, add=None):
6047 if add is None:
6048 add = datetime.timedelta()
6049 """ Convert a date object to a string
6050 with d-Mon-Y style for IMAP or the inverse
6051 case
6052
6053 add <timedelta> adds to the date object
6054 """
6055 months = [None, "Jan","Feb","Mar","Apr","May","Jun",
6056 "Jul", "Aug","Sep","Oct","Nov","Dec"]
6057 if isinstance(date, basestring):
6058
6059 try:
6060 dayname, datestring = date.split(",")
6061 except (ValueError):
6062 LOGGER.debug("Could not parse date text: %s" % date)
6063 return None
6064 date_list = datestring.strip().split()
6065 year = int(date_list[2])
6066 month = months.index(date_list[1])
6067 day = int(date_list[0])
6068 hms = map(int, date_list[3].split(":"))
6069 return datetime.datetime(year, month, day,
6070 hms[0], hms[1], hms[2]) + add
6071 elif isinstance(date, (datetime.datetime, datetime.date)):
6072 return (date + add).strftime("%d-%b-%Y")
6073
6074 else:
6075 return None
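
# A minimal usage sketch (values hypothetical): convert_date maps a
# date/datetime to the d-Mon-Y string IMAP expects, and parses an IMAP
# "Date:" header string back into a datetime:
#
#   adapter.convert_date(datetime.date(2012, 1, 5))
#   # -> '05-Jan-2012'
#   adapter.convert_date("Thu, 5 Jan 2012 10:30:00 +0000")
#   # -> datetime.datetime(2012, 1, 5, 10, 30)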
6076
6077 @staticmethod
6078 def header_represent(f, r):
6079 from email.header import decode_header
6080 text, encoding = decode_header(f)[0]
6081 return text
6082
6083 def encode_text(self, text, charset, errors="replace"):
6084 """ convert text for mail to unicode"""
6085 if text is None:
6086 text = ""
6087 else:
6088 if isinstance(text, str):
6089 if charset is None:
6090 text = unicode(text, "utf-8", errors)
6091 else:
6092 text = unicode(text, charset, errors)
6093 else:
6094 raise Exception("Unsupported mail text type %s" % type(text))
6095 return text.encode("utf-8")
6096
6097 def get_charset(self, message):
6098 charset = message.get_content_charset()
6099 return charset
6100
6101 def get_mailboxes(self):
6102 """ Query the mail database for mailbox names """
6103 if self.static_names:
6104
6105 self.connection.mailbox_names = self.static_names
6106 return self.static_names.keys()
6107
6108 mailboxes_list = self.connection.list()
6109 self.connection.mailbox_names = dict()
6110 mailboxes = list()
6111 x = 0
6112 for item in mailboxes_list[1]:
6113 x = x + 1
6114 item = item.strip()
6115 if not "NOSELECT" in item.upper():
6116 sub_items = item.split("\"")
6117 sub_items = [sub_item for sub_item in sub_items \
6118 if len(sub_item.strip()) > 0]
6119
6120 mailbox = sub_items[-1]
6121
6122
6123 mailbox_name = re.sub('^[_0-9]*', '', re.sub('[^_\w]','',re.sub('[/ ]','_',mailbox)))
6124 mailboxes.append(mailbox_name)
6125 self.connection.mailbox_names[mailbox_name] = mailbox
6126
6127 return mailboxes
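
# A sketch of the sanitization above (server names hypothetical): a native
# name like '[Gmail]/Sent Mail' becomes the table name 'Gmail_Sent_Mail'
# ('/' and spaces to '_', other non-word characters dropped, leading
# digits and underscores stripped).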
6128
6129 def get_query_mailbox(self, query):
6130 nofield = True
6131 tablename = None
6132 attr = query
6133 while nofield:
6134 if hasattr(attr, "first"):
6135 attr = attr.first
6136 if isinstance(attr, Field):
6137 return attr.tablename
6138 elif isinstance(attr, Query):
6139 pass
6140 else:
6141 return None
6142 else:
6143 return None
6144 return tablename
6145
6146 def is_flag(self, flag):
6147 if self.search_fields.get(flag, None) in self.flags:
6148 return True
6149 else:
6150 return False
6151
6152 def define_tables(self, mailbox_names=None):
6153 """
6154 Auto create common IMAP fields.
6155
6156 This function creates field definitions "statically",
6157 meaning that custom fields (as in other adapters) are
6158 not supported; definitions are handled on a service/mode
6159 basis (local syntax for Gmail(r), Ymail(r)).
6160
6161 Returns a dictionary with tablename, server native mailbox name
6162 pairs.
6163 """
6164 if mailbox_names:
6165
6166 self.static_names = mailbox_names
6167 else:
6168 self.static_names = None
6169 if not isinstance(self.connection.mailbox_names, dict):
6170 self.get_mailboxes()
6171
6172 names = self.connection.mailbox_names.keys()
6173
6174 for name in names:
6175 self.db.define_table("%s" % name,
6176 Field("uid", "string", writable=False),
6177 Field("answered", "boolean"),
6178 Field("created", "datetime", writable=False),
6179 Field("content", "list:string", writable=False),
6180 Field("to", "string", writable=False),
6181 Field("cc", "string", writable=False),
6182 Field("bcc", "string", writable=False),
6183 Field("size", "integer", writable=False),
6184 Field("deleted", "boolean"),
6185 Field("draft", "boolean"),
6186 Field("flagged", "boolean"),
6187 Field("sender", "string", writable=False),
6188 Field("recent", "boolean", writable=False),
6189 Field("seen", "boolean"),
6190 Field("subject", "string", writable=False),
6191 Field("mime", "string", writable=False),
6192 Field("email", "string", writable=False, readable=False),
6193 Field("attachments", list, writable=False, readable=False),
6194 Field("encoding")
6195 )
6196
6197
6198
6199 self.db[name].mailbox = \
6200 self.connection.mailbox_names[name]
6201
6202
6203 self.db[name].to.represent = self.db[name].cc.represent = \
6204 self.db[name].bcc.represent = self.db[name].sender.represent = \
6205 self.db[name].subject.represent = self.header_represent
6206
6207
6208 self.db.mailboxes = self.connection.mailbox_names
6209 return self.db.mailboxes
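
# e.g. (a sketch): imapdb.define_tables({"inbox": "INBOX"}) defines a
# single "inbox" table bound to the server's native INBOX mailbox and
# returns {"inbox": "INBOX"}, without listing mailboxes on the server.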
6210
6215
6216 def _select(self, query, fields, attributes):
6217 if use_common_filters(query):
6218 query = self.common_filter(query, [self.get_query_mailbox(query),])
6219 return str(query)
6220
6221 def select(self, query, fields, attributes):
6222 """ Search and Fetch records and return web2py rows
6223 """
6224
6225 if use_common_filters(query):
6226 query = self.common_filter(query, [self.get_query_mailbox(query),])
6227
6228 import email
6229
6230
6231 tablename = None
6232 fetch_results = list()
6233
6234 if isinstance(query, Query):
6235 tablename = self.get_table(query)
6236 mailbox = self.connection.mailbox_names.get(tablename, None)
6237 if mailbox is None:
6238 raise ValueError("Mailbox name not found: %s" % tablename)
6239 else:
6240
6241 result, selected = self.connection.select(mailbox, True)
6242 if result != "OK":
6243 raise Exception("IMAP error: %s" % selected)
6244 self.mailbox_size = int(selected[0])
6245 search_query = "(%s)" % str(query).strip()
6246 search_result = self.connection.uid("search", None, search_query)
6247
6248 if search_result[0] == "OK":
6249
6250
6251
6252
6253 limitby = attributes.get('limitby', None)
6254 messages_set = search_result[1][0].split()
6255
6256 messages_set.reverse()
6257 if limitby is not None:
6258
6259 messages_set = messages_set[int(limitby[0]):int(limitby[1])]
6260
6261
6262 if any([(field.name in ["content", "size",
6263 "attachments", "email"]) for
6264 field in fields]):
6265 imap_fields = "(RFC822 FLAGS)"
6266 else:
6267 imap_fields = "(RFC822.HEADER FLAGS)"
6268
6269 if len(messages_set) > 0:
6270
6271
6272
6273
6274 for uid in messages_set:
6275
6276 typ, data = self.connection.uid("fetch", uid, imap_fields)
6277 if typ == "OK":
6278 fr = {"message": int(data[0][0].split()[0]),
6279 "uid": long(uid),
6280 "email": email.message_from_string(data[0][1]),
6281 "raw_message": data[0][1]}
6282 fr["multipart"] = fr["email"].is_multipart()
6283
6284 fr["flags"] = self.driver.ParseFlags(data[1])
6285 fetch_results.append(fr)
6286 else:
6287
6288 raise Exception("IMAP error retrieving the body: %s" % data)
6289 else:
6290 raise Exception("IMAP search error: %s" % search_result[1])
6291 elif isinstance(query, (Expression, basestring)):
6292 raise NotImplementedError()
6293 else:
6294 raise TypeError("Unexpected query type")
6295
6296 imapqry_dict = {}
6297 imapfields_dict = {}
6298
6299 if len(fields) == 1 and isinstance(fields[0], SQLALL):
6300 allfields = True
6301 elif len(fields) == 0:
6302 allfields = True
6303 else:
6304 allfields = False
6305 if allfields:
6306 colnames = ["%s.%s" % (tablename, field) for field in self.search_fields.keys()]
6307 else:
6308 colnames = ["%s.%s" % (tablename, field.name) for field in fields]
6309
6310 for k in colnames:
6311 imapfields_dict[k] = k
6312
6313 imapqry_list = list()
6314 imapqry_array = list()
6315 for fr in fetch_results:
6316 attachments = []
6317 content = []
6318 size = 0
6319 n = int(fr["message"])
6320 item_dict = dict()
6321 message = fr["email"]
6322 uid = fr["uid"]
6323 charset = self.get_charset(message)
6324 flags = fr["flags"]
6325 raw_message = fr["raw_message"]
6326
6327
6328
6329
6330
6331
6332
6333
6334
6335
6336 if "%s.id" % tablename in colnames:
6337 item_dict["%s.id" % tablename] = n
6338 if "%s.created" % tablename in colnames:
6339 item_dict["%s.created" % tablename] = self.convert_date(message["Date"])
6340 if "%s.uid" % tablename in colnames:
6341 item_dict["%s.uid" % tablename] = uid
6342 if "%s.sender" % tablename in colnames:
6343
6344
6345
6346 item_dict["%s.sender" % tablename] = message["From"]
6347 if "%s.to" % tablename in colnames:
6348 item_dict["%s.to" % tablename] = message["To"]
6349 if "%s.cc" % tablename in colnames:
6350 if "Cc" in message.keys():
6351 item_dict["%s.cc" % tablename] = message["Cc"]
6352 else:
6353 item_dict["%s.cc" % tablename] = ""
6354 if "%s.bcc" % tablename in colnames:
6355 if "Bcc" in message.keys():
6356 item_dict["%s.bcc" % tablename] = message["Bcc"]
6357 else:
6358 item_dict["%s.bcc" % tablename] = ""
6359 if "%s.deleted" % tablename in colnames:
6360 item_dict["%s.deleted" % tablename] = "\\Deleted" in flags
6361 if "%s.draft" % tablename in colnames:
6362 item_dict["%s.draft" % tablename] = "\\Draft" in flags
6363 if "%s.flagged" % tablename in colnames:
6364 item_dict["%s.flagged" % tablename] = "\\Flagged" in flags
6365 if "%s.recent" % tablename in colnames:
6366 item_dict["%s.recent" % tablename] = "\\Recent" in flags
6367 if "%s.seen" % tablename in colnames:
6368 item_dict["%s.seen" % tablename] = "\\Seen" in flags
6369 if "%s.subject" % tablename in colnames:
6370 item_dict["%s.subject" % tablename] = message["Subject"]
6371 if "%s.answered" % tablename in colnames:
6372 item_dict["%s.answered" % tablename] = "\\Answered" in flags
6373 if "%s.mime" % tablename in colnames:
6374 item_dict["%s.mime" % tablename] = message.get_content_type()
6375 if "%s.encoding" % tablename in colnames:
6376 item_dict["%s.encoding" % tablename] = charset
6377
6378
6379
6380
6381
6382
6383 if "%s.email" % tablename in colnames:
6384
6385 item_dict["%s.email" % tablename] = raw_message
6386
6387
6388
6389
6390
6391
6392 for part in message.walk():
6393 maintype = part.get_content_maintype()
6394 if ("%s.attachments" % tablename in colnames) or \
6395 ("%s.content" % tablename in colnames):
6396 if "%s.attachments" % tablename in colnames:
6397 if not ("text" in maintype):
6398 payload = part.get_payload(decode=True)
6399 if payload:
6400 attachment = {
6401 "payload": payload,
6402 "filename": part.get_filename(),
6403 "encoding": part.get_content_charset(),
6404 "mime": part.get_content_type(),
6405 "disposition": part["Content-Disposition"]}
6406 attachments.append(attachment)
6407 if "%s.content" % tablename in colnames:
6408 payload = part.get_payload(decode=True)
6409 part_charset = self.get_charset(part)
6410 if "text" in maintype:
6411 if payload:
6412 content.append(self.encode_text(payload, part_charset))
6413 if "%s.size" % tablename in colnames:
6414 if part is not None:
6415 size += len(str(part))
6416 item_dict["%s.content" % tablename] = bar_encode(content)
6417 item_dict["%s.attachments" % tablename] = attachments
6418 item_dict["%s.size" % tablename] = size
6419 imapqry_list.append(item_dict)
6420
6421
6422
6423 for item_dict in imapqry_list:
6424 imapqry_array_item = list()
6425 for fieldname in colnames:
6426 imapqry_array_item.append(item_dict[fieldname])
6427 imapqry_array.append(imapqry_array_item)
6428
6429
6430
6431 processor = attributes.get('processor',self.parse)
6432 return processor(imapqry_array, fields, colnames)
6433
6434 def _update(self, tablename, query, fields, commit=False):
6435
6436 commands = list()
6437 if use_common_filters(query):
6438 query = self.common_filter(query, [tablename,])
6439 mark = []
6440 unmark = []
6441 if query:
6442 for item in fields:
6443 field = item[0]
6444 name = field.name
6445 value = item[1]
6446 if self.is_flag(name):
6447 flag = self.search_fields[name]
6448 if (value is not None) and (flag != "\\Recent"):
6449 if value:
6450 mark.append(flag)
6451 else:
6452 unmark.append(flag)
6453 result, data = self.connection.select(
6454 self.connection.mailbox_names[tablename])
6455 string_query = "(%s)" % query
6456 result, data = self.connection.search(None, string_query)
6457 store_list = [item.strip() for item in data[0].split()
6458 if item.strip().isdigit()]
6459
6460 for number in store_list:
6461 result = None
6462 if len(mark) > 0:
6463 commands.append((number, "+FLAGS", "(%s)" % " ".join(mark)))
6464 if len(unmark) > 0:
6465 commands.append((number, "-FLAGS", "(%s)" % " ".join(unmark)))
6466 return commands
6467
6468 def update(self, tablename, query, fields):
6469 rowcount = 0
6470 commands = self._update(tablename, query, fields)
6471 for command in commands:
6472 result, data = self.connection.store(*command)
6473 if result == "OK":
6474 rowcount += 1
6475 else:
6476 raise Exception("IMAP storing error: %s" % data)
6477 return rowcount
6478
6479 def _count(self, query, distinct=None):
6480 raise NotImplementedError()
6481
6482 def count(self, query, distinct=None):
6494
6495 def delete(self, tablename, query):
6496 counter = 0
6497 if query:
6498 if use_common_filters(query):
6499 query = self.common_filter(query, [tablename,])
6500 result, data = self.connection.select(self.connection.mailbox_names[tablename])
6501 string_query = "(%s)" % query
6502 result, data = self.connection.search(None, string_query)
6503 store_list = [item.strip() for item in data[0].split() if item.strip().isdigit()]
6504 for number in store_list:
6505 result, data = self.connection.store(number, "+FLAGS", "(\\Deleted)")
6506 if result == "OK":
6507 counter += 1
6508 else:
6509 raise Exception("IMAP store error: %s" % data)
6510 if counter > 0:
6511 result, data = self.connection.expunge()
6512 return counter
6513
6514 def BELONGS(self, first, second):
6515 result = None
6516 name = self.search_fields[first.name]
6517 if name == "MESSAGE":
6518 values = [str(val) for val in second if str(val).isdigit()]
6519 result = "%s" % ",".join(values).strip()
6520
6521 elif name == "UID":
6522 values = [str(val) for val in second if str(val).isdigit()]
6523 result = "UID %s" % ",".join(values).strip()
6524
6525 else:
6526 raise Exception("Operation not supported")
6527
6528 return result
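
# A sketch of the expansion (mailbox name hypothetical):
#   imapdb.INBOX.uid.belongs([1, 2, 3]) -> "UID 1,2,3"
# Values that are not all digits are silently dropped from the set.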
6529
6530 def CONTAINS(self, first, second, case_sensitive=False):
6531
6532 result = None
6533 name = self.search_fields[first.name]
6534
6535 if name in ("FROM", "TO", "SUBJECT", "TEXT"):
6536 result = "%s \"%s\"" % (name, self.expand(second))
6537 else:
6538 if first.name in ("cc", "bcc"):
6539 result = "%s \"%s\"" % (first.name.upper(), self.expand(second))
6540 elif first.name == "mime":
6541 result = "HEADER Content-Type \"%s\"" % self.expand(second)
6542 else:
6543 raise Exception("Operation not supported")
6544 return result
6545
6546 def GT(self, first, second):
6547 result = None
6548 name = self.search_fields[first.name]
6549 if name == "MESSAGE":
6550 last_message = self.get_last_message(first.tablename)
6551 result = "%d:%d" % (int(self.expand(second)) + 1, last_message)
6552 elif name == "UID":
6553
6554
6555
6556 try:
6557 pedestal, threshold = self.get_uid_bounds(first.tablename)
6558 except TypeError:
6559 e = sys.exc_info()[1]
6560 LOGGER.debug("Error requesting uid bounds: %s", str(e))
6561 return ""
6562 try:
6563 lower_limit = int(self.expand(second)) + 1
6564 except (ValueError, TypeError):
6565 e = sys.exc_info()[1]
6566 raise Exception("Operation not supported (non integer UID)")
6567 result = "UID %s:%s" % (lower_limit, threshold)
6568 elif name == "DATE":
6569 result = "SINCE %s" % self.convert_date(second, add=datetime.timedelta(1))
6570 elif name == "SIZE":
6571 result = "LARGER %s" % self.expand(second)
6572 else:
6573 raise Exception("Operation not supported")
6574 return result
6575
6576 def GE(self, first, second):
6577 result = None
6578 name = self.search_fields[first.name]
6579 if name == "MESSAGE":
6580 last_message = self.get_last_message(first.tablename)
6581 result = "%s:%s" % (self.expand(second), last_message)
6582 elif name == "UID":
6583
6584
6585
6586 try:
6587 pedestal, threshold = self.get_uid_bounds(first.tablename)
6588 except TypeError:
6589 e = sys.exc_info()[1]
6590 LOGGER.debug("Error requesting uid bounds: %s", str(e))
6591 return ""
6592 lower_limit = self.expand(second)
6593 result = "UID %s:%s" % (lower_limit, threshold)
6594 elif name == "DATE":
6595 result = "SINCE %s" % self.convert_date(second)
6596 else:
6597 raise Exception("Operation not supported")
6598 return result
6599
6600 def LT(self, first, second):
6601 result = None
6602 name = self.search_fields[first.name]
6603 if name == "MESSAGE":
6604 result = "%s:%s" % (1, int(self.expand(second)) - 1)
6605 elif name == "UID":
6606 try:
6607 pedestal, threshold = self.get_uid_bounds(first.tablename)
6608 except TypeError:
6609 e = sys.exc_info()[1]
6610 LOGGER.debug("Error requesting uid bounds: %s", str(e))
6611 return ""
6612 try:
6613 upper_limit = int(self.expand(second)) - 1
6614 except (ValueError, TypeError):
6615 e = sys.exc_info()[1]
6616 raise Exception("Operation not supported (non integer UID)")
6617 result = "UID %s:%s" % (pedestal, upper_limit)
6618 elif name == "DATE":
6619 result = "BEFORE %s" % self.convert_date(second)
6620 elif name == "SIZE":
6621 result = "SMALLER %s" % self.expand(second)
6622 else:
6623 raise Exception("Operation not supported")
6624 return result
6625
6626 def LE(self, first, second):
6627 result = None
6628 name = self.search_fields[first.name]
6629 if name == "MESSAGE":
6630 result = "%s:%s" % (1, self.expand(second))
6631 elif name == "UID":
6632 try:
6633 pedestal, threshold = self.get_uid_bounds(first.tablename)
6634 except TypeError:
6635 e = sys.exc_info()[1]
6636 LOGGER.debug("Error requesting uid bounds: %s", str(e))
6637 return ""
6638 upper_limit = int(self.expand(second))
6639 result = "UID %s:%s" % (pedestal, upper_limit)
6640 elif name == "DATE":
6641 result = "BEFORE %s" % self.convert_date(second, add=datetime.timedelta(1))
6642 else:
6643 raise Exception("Operation not supported")
6644 return result
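
# Together, GT/GE/LT/LE translate DAL comparisons into IMAP search keys.
# A sketch (mailbox name hypothetical):
#   imapdb.INBOX.created > datetime.date(2012, 1, 5)  -> "SINCE 06-Jan-2012"
#   imapdb.INBOX.size < 1024                          -> "SMALLER 1024"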
6645
6646 def NE(self, first, second=None):
6647 if (second is None) and isinstance(first, Field):
6648
6649 if first.type == "id":
6650 return self.GE(first, 1)
6651 result = self.NOT(self.EQ(first, second))
6652 result = result.replace("NOT NOT", "").strip()
6653 return result
6654
6655 def EQ(self, first, second):
6656 name = self.search_fields[first.name]
6657 result = None
6658 if name is not None:
6659 if name == "MESSAGE":
6660
6661 result = "%s" % self.expand(second)
6662 elif name == "UID":
6663 result = "UID %s" % self.expand(second)
6664 elif name == "DATE":
6665 result = "ON %s" % self.convert_date(second)
6666
6667 elif name in self.flags:
6668 if second:
6669 result = "%s" % (name.upper()[1:])
6670 else:
6671 result = "NOT %s" % (name.upper()[1:])
6672 else:
6673 raise Exception("Operation not supported")
6674 else:
6675 raise Exception("Operation not supported")
6676 return result
6677
6678 def AND(self, first, second):
6681
6682 def OR(self, first, second):
6685
6686 def NOT(self, first):
6687 result = "NOT %s" % self.expand(first)
6688 return result
6689
6690
6691
6692
6693
6694 ADAPTERS = {
6695 'sqlite': SQLiteAdapter,
6696 'spatialite': SpatiaLiteAdapter,
6697 'sqlite:memory': SQLiteAdapter,
6698 'spatialite:memory': SpatiaLiteAdapter,
6699 'mysql': MySQLAdapter,
6700 'postgres': PostgreSQLAdapter,
6701 'postgres:psycopg2': PostgreSQLAdapter,
6702 'postgres:pg8000': PostgreSQLAdapter,
6703 'postgres2:psycopg2': NewPostgreSQLAdapter,
6704 'postgres2:pg8000': NewPostgreSQLAdapter,
6705 'oracle': OracleAdapter,
6706 'mssql': MSSQLAdapter,
6707 'mssql2': MSSQL2Adapter,
6708 'mssql3': MSSQL3Adapter,
6709 'vertica': VerticaAdapter,
6710 'sybase': SybaseAdapter,
6711 'db2': DB2Adapter,
6712 'teradata': TeradataAdapter,
6713 'informix': InformixAdapter,
6714 'informix-se': InformixSEAdapter,
6715 'firebird': FireBirdAdapter,
6716 'firebird_embedded': FireBirdAdapter,
6717 'ingres': IngresAdapter,
6718 'ingresu': IngresUnicodeAdapter,
6719 'sapdb': SAPDBAdapter,
6720 'cubrid': CubridAdapter,
6721 'jdbc:sqlite': JDBCSQLiteAdapter,
6722 'jdbc:sqlite:memory': JDBCSQLiteAdapter,
6723 'jdbc:postgres': JDBCPostgreSQLAdapter,
6724 'gae': GoogleDatastoreAdapter,
6725 'google:datastore': GoogleDatastoreAdapter,
6726 'google:sql': GoogleSQLAdapter,
6727 'couchdb': CouchDBAdapter,
6728 'mongodb': MongoDBAdapter,
6729 'imap': IMAPAdapter
6730 }
6733 """
6734 Field type validation, using web2py's validators mechanism.
6735
6736 makes sure the content of a field is in line with the declared
6737 fieldtype
6738 """
6739 db = field.db
6740 if not have_validators:
6741 return []
6742 field_type, field_length = field.type, field.length
6743 if isinstance(field_type, SQLCustomType):
6744 if hasattr(field_type, 'validator'):
6745 return field_type.validator
6746 else:
6747 field_type = field_type.type
6748 elif not isinstance(field_type,str):
6749 return []
6750 requires=[]
6751 def ff(r,id):
6752 row=r(id)
6753 if not row:
6754 return id
6755 elif hasattr(r, '_format') and isinstance(r._format,str):
6756 return r._format % row
6757 elif hasattr(r, '_format') and callable(r._format):
6758 return r._format(row)
6759 else:
6760 return id
6761 if field_type in ('string', 'text', 'password'):
6762 requires.append(validators.IS_LENGTH(field_length))
6763 elif field_type == 'json':
6764 requires.append(validators.IS_EMPTY_OR(validators.IS_JSON()))
6765 elif field_type == 'double' or field_type == 'float':
6766 requires.append(validators.IS_FLOAT_IN_RANGE(-1e100, 1e100))
6767 elif field_type in ('integer','bigint'):
6768 requires.append(validators.IS_INT_IN_RANGE(-1e100, 1e100))
6769 elif field_type.startswith('decimal'):
6770 requires.append(validators.IS_DECIMAL_IN_RANGE(-10**10, 10**10))
6771 elif field_type == 'date':
6772 requires.append(validators.IS_DATE())
6773 elif field_type == 'time':
6774 requires.append(validators.IS_TIME())
6775 elif field_type == 'datetime':
6776 requires.append(validators.IS_DATETIME())
6777 elif db and field_type.startswith('reference') and \
6778 field_type.find('.') < 0 and \
6779 field_type[10:] in db.tables:
6780 referenced = db[field_type[10:]]
6781 def repr_ref(id, row=None, r=referenced, f=ff): return f(r, id)
6782 field.represent = field.represent or repr_ref
6783 if hasattr(referenced, '_format') and referenced._format:
6784 requires = validators.IS_IN_DB(db,referenced._id,
6785 referenced._format)
6786 if field.unique:
6787 requires._and = validators.IS_NOT_IN_DB(db,field)
6788 if field.tablename == field_type[10:]:
6789 return validators.IS_EMPTY_OR(requires)
6790 return requires
6791 elif db and field_type.startswith('list:reference') and \
6792 field_type.find('.') < 0 and \
6793 field_type[15:] in db.tables:
6794 referenced = db[field_type[15:]]
6795 def list_ref_repr(ids, row=None, r=referenced, f=ff):
6796 if not ids:
6797 return None
6798 refs = None
6799 db, id = r._db, r._id
6800 if isinstance(db._adapter, GoogleDatastoreAdapter):
6801 def count(values): return db(id.belongs(values)).select(id)
6802 rx = range(0, len(ids), 30)
6803 refs = reduce(lambda a,b:a&b, [count(ids[i:i+30]) for i in rx])
6804 else:
6805 refs = db(id.belongs(ids)).select(id)
6806 return (refs and ', '.join(str(f(r,x.id)) for x in refs) or '')
6807 field.represent = field.represent or list_ref_repr
6808 if hasattr(referenced, '_format') and referenced._format:
6809 requires = validators.IS_IN_DB(db,referenced._id,
6810 referenced._format,multiple=True)
6811 else:
6812 requires = validators.IS_IN_DB(db,referenced._id,
6813 multiple=True)
6814 if field.unique:
6815 requires._and = validators.IS_NOT_IN_DB(db,field)
6816 return requires
6817 elif field_type.startswith('list:'):
6818 def repr_list(values, row=None): return ', '.join(str(v) for v in (values or []))
6819 field.represent = field.represent or repr_list
6820 if field.unique:
6821 requires.insert(0,validators.IS_NOT_IN_DB(db,field))
6822 sff = ['in', 'do', 'da', 'ti', 'de', 'bo']
6823 if field.notnull and not field_type[:2] in sff:
6824 requires.insert(0, validators.IS_NOT_EMPTY())
6825 elif not field.notnull and field_type[:2] in sff and requires:
6826 requires[-1] = validators.IS_EMPTY_OR(requires[-1])
6827 return requires
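
# A sketch of the effect (table/field names hypothetical): after
# define_table, a plain string field picks up a length validator, while a
# reference field picks up IS_IN_DB when the referenced table has a _format:
#
#   db.define_table('person', Field('name', 'string', length=64))
#   # db.person.name.requires ~ [IS_LENGTH(64)]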
6828
6829
6830 def bar_escape(item):
6831 return str(item).replace('|', '||')
6832
6833 def bar_encode(items):
6834 return '|%s|' % '|'.join(bar_escape(item) for item in items)
6835
6836 def bar_decode_integer(value):
6837 if not hasattr(value,'split') and hasattr(value,'read'):
6838 value = value.read()
6839 return [long(x) for x in value.split('|') if x.strip()]
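
# A sketch of the round trip: '|' is the separator used to pack list:
# types into a single column, so literal bars are doubled on the way in:
#
#   bar_escape('a|b')            # -> 'a||b'
#   bar_decode_integer('1|2|3')  # -> [1L, 2L, 3L]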
6840
6841 def bar_decode_string(value):
6842 return [x.replace('||', '|') for x in
6843 REGEX_UNPACK.split(value[1:-1]) if x.strip()]
6844
6844
6845
6846 class Row(object):
6847
6848 """
6849 a dictionary that lets you do d['a'] as well as d.a
6850 this is only used to store a Row
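
Example (a minimal sketch of the attribute/key duality)::

    >>> row = Row(a=1)
    >>> row.a == row['a'] == 1
    True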
6851 """
6852
6853 def __init__(self, *args, **kwargs):
6854 self.__dict__.update(*args,**kwargs)
6855
6856 def __getitem__(self, key):
6857 key=str(key)
6858 m = REGEX_TABLE_DOT_FIELD.match(key)
6859 if key in self.get('_extra',{}):
6860 return self._extra[key]
6861 elif m:
6862 try:
6863 return ogetattr(self, m.group(1))[m.group(2)]
6864 except (KeyError,AttributeError,TypeError):
6865 key = m.group(2)
6866 return ogetattr(self, key)
6867
6868 def __setitem__(self, key, value):
6869 setattr(self, str(key), value)
6870
6871 __delitem__ = delattr
6872
6873 __copy__ = lambda self: Row(self)
6874
6875 __call__ = __getitem__
6876
6877 def get(self, key, default=None):
6878 return self.__dict__.get(key,default)
6879
6880 def __contains__(self, key):
6881 return key in self.__dict__
6882
6883 has_key = __contains__
6884
6885 def __nonzero__(self):
6886 return len(self.__dict__)>0
6887
6888 def update(self, *args, **kwargs):
6889 self.__dict__.update(*args, **kwargs)
6890
6891 def keys(self):
6892 return self.__dict__.keys()
6893
6894 def items(self):
6895 return self.__dict__.items()
6896
6897 def values(self):
6898 return self.__dict__.values()
6899
6900 def __iter__(self):
6901 return self.__dict__.__iter__()
6902
6903 def iteritems(self):
6904 return self.__dict__.iteritems()
6905
6906 def __str__(self):
6907
6908 return '<Row %s>' % self.as_dict()
6909
6910 def __repr__(self):
6911 return '<Row %s>' % self.as_dict()
6912
6913 def __int__(self):
6914 return object.__getattribute__(self,'id')
6915
6916 def __long__(self):
6917 return long(object.__getattribute__(self,'id'))
6918
6919 def __eq__(self, other):
6920 try:
6921 return self.as_dict() == other.as_dict()
6922 except AttributeError:
6923 return False
6924
6925 def __ne__(self, other):
6926 return not (self == other)
6927
6928 def __copy__(self):
6929 return Row(dict(self))
6930
6931 def as_dict(self, datetime_to_str=False, custom_types=None):
6932 SERIALIZABLE_TYPES = [str, unicode, int, long, float, bool, list, dict]
6933 if isinstance(custom_types,(list,tuple,set)):
6934 SERIALIZABLE_TYPES += custom_types
6935 elif custom_types:
6936 SERIALIZABLE_TYPES.append(custom_types)
6937 d = dict(self)
6938 for k in copy.copy(d.keys()):
6939 v=d[k]
6940 if d[k] is None:
6941 continue
6942 elif isinstance(v,Row):
6943 d[k]=v.as_dict()
6944 elif isinstance(v,Reference):
6945 d[k]=long(v)
6946 elif isinstance(v,decimal.Decimal):
6947 d[k]=float(v)
6948 elif isinstance(v, (datetime.date, datetime.datetime, datetime.time)):
6949 if datetime_to_str:
6950 d[k] = v.isoformat().replace('T',' ')[:19]
6951 elif not isinstance(v,tuple(SERIALIZABLE_TYPES)):
6952 del d[k]
6953 return d
6954
6955 def as_xml(self, row_name="row", colnames=None, indent='  '):
6956 def f(row,field,indent=' '):
6957 if isinstance(row,Row):
6958 spc = indent+' \n'
6959 items = [f(row[x],x,indent+' ') for x in row]
6960 return '%s<%s>\n%s\n%s</%s>' % (
6961 indent,
6962 field,
6963 spc.join(item for item in items if item),
6964 indent,
6965 field)
6966 elif not callable(row):
6967 if REGEX_ALPHANUMERIC.match(field):
6968 return '%s<%s>%s</%s>' % (indent,field,row,field)
6969 else:
6970 return '%s<extra name="%s">%s</extra>' % \
6971 (indent,field,row)
6972 else:
6973 return None
6974 return f(self, row_name, indent=indent)
6975
6976 def as_json(self, mode="object", default=None, colnames=None,
6977 serialize=True, **kwargs):
6978 """
6979 serializes the table to a JSON list of objects
6980 kwargs are passed to .as_dict method
6981 only "object" mode supported for single row
6982
6983 serialize = False used by Rows.as_json
6984 TODO: return array mode with query column order
6985 """
6986
6987 def inner_loop(record, col):
6988 (t, f) = col.split('.')
6989 res = None
6990 if not REGEX_TABLE_DOT_FIELD.match(col):
6991 key = col
6992 res = record._extra[col]
6993 else:
6994 key = f
6995 if isinstance(record.get(t, None), Row):
6996 res = record[t][f]
6997 else:
6998 res = record[f]
6999 if mode == 'object':
7000 return (key, res)
7001 else:
7002 return res
7003
7004 multi = any([isinstance(v, self.__class__) for v in self.values()])
7005 mode = mode.lower()
7006 if not mode in ['object', 'array']:
7007 raise SyntaxError('Invalid JSON serialization mode: %s' % mode)
7008
7009 if mode=='object' and colnames:
7010 item = dict([inner_loop(self, col) for col in colnames])
7011 elif colnames:
7012 item = [inner_loop(self, col) for col in colnames]
7013 else:
7014 if not mode == 'object':
7015 raise SyntaxError('Invalid JSON serialization mode: %s' % mode)
7016
7017 if multi:
7018 item = dict()
7019 [item.update(**v.as_dict(**kwargs)) for v in self.values()]
7020 else:
7021 item = self.as_dict(**kwargs)
7022
7023 if serialize:
7024 if have_serializers:
7025 return serializers.json(item,
7026 default=default or
7027 serializers.custom_json)
7028 elif simplejson:
7029 return simplejson.dumps(item)
7030 else:
7031 raise RuntimeError("missing simplejson")
7032 else:
7033 return item
7034
7044
7045 def smart_query(fields, text):
7046 if not isinstance(fields,(list,tuple)):
7047 fields = [fields]
7048 new_fields = []
7049 for field in fields:
7050 if isinstance(field,Field):
7051 new_fields.append(field)
7052 elif isinstance(field,Table):
7053 for ofield in field:
7054 new_fields.append(ofield)
7055 else:
7056 raise RuntimeError("fields must be a list of fields")
7057 fields = new_fields
7058 field_map = {}
7059 for field in fields:
7060 n = field.name.lower()
7061 if not n in field_map:
7062 field_map[n] = field
7063 n = str(field).lower()
7064 if not n in field_map:
7065 field_map[n] = field
7066 constants = {}
7067 i = 0
7068 while True:
7069 m = REGEX_CONST_STRING.search(text)
7070 if not m: break
7071 text = text[:m.start()]+('#%i' % i)+text[m.end():]
7072 constants[str(i)] = m.group()[1:-1]
7073 i+=1
7074 text = re.sub('\s+',' ',text).lower()
7075 for a,b in [('&','and'),
7076 ('|','or'),
7077 ('~','not'),
7078 ('==','='),
7079 ('<','<'),
7080 ('>','>'),
7081 ('<=','<='),
7082 ('>=','>='),
7083 ('<>','!='),
7084 ('=<','<='),
7085 ('=>','>='),
7086 ('=','='),
7087 (' less or equal than ','<='),
7088 (' greater or equal than ','>='),
7089 (' equal or less than ','<='),
7090 (' equal or greater than ','>='),
7091 (' less or equal ','<='),
7092 (' greater or equal ','>='),
7093 (' equal or less ','<='),
7094 (' equal or greater ','>='),
7095 (' not equal to ','!='),
7096 (' not equal ','!='),
7097 (' equal to ','='),
7098 (' equal ','='),
7099 (' equals ','='),
7100 (' less than ','<'),
7101 (' greater than ','>'),
7102 (' starts with ','startswith'),
7103 (' ends with ','endswith'),
7104 (' not in ' , 'notbelongs'),
7105 (' in ' , 'belongs'),
7106 (' is ','=')]:
7107 if a[0]==' ':
7108 text = text.replace(' is'+a,' %s ' % b)
7109 text = text.replace(a,' %s ' % b)
7110 text = re.sub('\s+',' ',text).lower()
7111 text = re.sub('(?P<a>[\<\>\!\=])\s+(?P<b>[\<\>\!\=])','\g<a>\g<b>',text)
7112 query = field = neg = op = logic = None
7113 for item in text.split():
7114 if field is None:
7115 if item == 'not':
7116 neg = True
7117 elif not neg and not logic and item in ('and','or'):
7118 logic = item
7119 elif item in field_map:
7120 field = field_map[item]
7121 else:
7122 raise RuntimeError("Invalid syntax")
7123 elif not field is None and op is None:
7124 op = item
7125 elif not op is None:
7126 if item.startswith('#'):
7127 if not item[1:] in constants:
7128 raise RuntimeError("Invalid syntax")
7129 value = constants[item[1:]]
7130 else:
7131 value = item
7132 if field.type in ('text', 'string', 'json'):
7133 if op == '=': op = 'like'
7134 if op == '=': new_query = field==value
7135 elif op == '<': new_query = field<value
7136 elif op == '>': new_query = field>value
7137 elif op == '<=': new_query = field<=value
7138 elif op == '>=': new_query = field>=value
7139 elif op == '!=': new_query = field!=value
7140 elif op == 'belongs': new_query = field.belongs(value.split(','))
7141 elif op == 'notbelongs': new_query = ~field.belongs(value.split(','))
7142 elif field.type in ('text', 'string', 'json'):
7143 if op == 'contains': new_query = field.contains(value)
7144 elif op == 'like': new_query = field.like(value)
7145 elif op == 'startswith': new_query = field.startswith(value)
7146 elif op == 'endswith': new_query = field.endswith(value)
7147 else: raise RuntimeError("Invalid operation")
7148 elif field._db._adapter.dbengine=='google:datastore' and \
7149 field.type in ('list:integer', 'list:string', 'list:reference'):
7150 if op == 'contains': new_query = field.contains(value)
7151 else: raise RuntimeError("Invalid operation")
7152 else: raise RuntimeError("Invalid operation")
7153 if neg: new_query = ~new_query
7154 if query is None:
7155 query = new_query
7156 elif logic == 'and':
7157 query &= new_query
7158 elif logic == 'or':
7159 query |= new_query
7160 field = op = neg = logic = None
7161 return query
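
# A sketch of what this parses (table/field names hypothetical):
#
#   smart_query([db.person], 'person.name contains "Jam"')
#   # -> db.person.name.contains('Jam')
#
# Note that on string/text/json fields a plain '=' is treated as a LIKE
# match (see the op == '=' rewrite above).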
7162
7163 class DAL(object):
7164
7165 """
7166 an instance of this class represents a database connection
7167
7168 Example::
7169
7170 db = DAL('sqlite://test.db')
7171
7172 or
7173
7174 db = DAL({"uri": ..., "items": ...}) # experimental
7175
7176 db.define_table('tablename', Field('fieldname1'),
7177 Field('fieldname2'))
7178 """
7179
7180 def __new__(cls, uri='sqlite://dummy.db', *args, **kwargs):
7207
7208 @staticmethod
7209 def set_folder(folder):
7210 """
7211 # ## this allows gluon to set a folder for this thread
7212 # ## <<<<<<<<< Should go away as new DAL replaces old sql.py
7213 """
7214 BaseAdapter.set_folder(folder)
7215
7216 @staticmethod
7217 def get_instances():
7218 """
7219 Returns a dictionary with uri as key with timings and defined tables
7220 {'sqlite://storage.sqlite': {
7221 'dbstats': [(select auth_user.email from auth_user, 0.02009)],
7222 'dbtables': {
7223 'defined': ['auth_cas', 'auth_event', 'auth_group',
7224 'auth_membership', 'auth_permission', 'auth_user'],
7225 'lazy': '[]'
7226 }
7227 }
7228 }
7229 """
7230 dbs = getattr(THREAD_LOCAL,'db_instances',{}).items()
7231 infos = {}
7232 for db_uid, db_group in dbs:
7233 for db in db_group:
7234 if not db._uri:
7235 continue
7236 k = hide_password(db._uri)
7237 infos[k] = dict(dbstats = [(row[0], row[1]) for row in db._timings],
7238 dbtables = {'defined':
7239 sorted(list(set(db.tables) -
7240 set(db._LAZY_TABLES.keys()))),
7241 'lazy': sorted(db._LAZY_TABLES.keys())}
7242 )
7243 return infos
7244
7245 @staticmethod
7258
7259 @staticmethod
7281
7282 def __init__(self, uri=DEFAULT_URI,
7283 pool_size=0, folder=None,
7284 db_codec='UTF-8', check_reserved=None,
7285 migrate=True, fake_migrate=False,
7286 migrate_enabled=True, fake_migrate_all=False,
7287 decode_credentials=False, driver_args=None,
7288 adapter_args=None, attempts=5, auto_import=False,
7289 bigint_id=False,debug=False,lazy_tables=False,
7290 db_uid=None, do_connect=True, after_connection=None):
7291 """
7292 Creates a new Database Abstraction Layer instance.
7293
7294 Keyword arguments:
7295
7296 :uri: string that contains information for connecting to a database.
7297 (default: 'sqlite://dummy.db')
7298
7299 experimental: you can specify a dictionary as uri
7300 parameter i.e. with
7301 db = DAL({"uri": "sqlite://storage.sqlite",
7302 "items": {...}, ...})
7303
7304 for an example of dict input you can check the output
7305 of the scaffolding db model with
7306
7307 db.as_dict()
7308
7309 Note that for compatibility with Python older than
7310 version 2.6.5 you should cast your dict input keys
7311 to str due to a syntax limitation on kwarg names.
7312 for proper DAL dictionary input you can use one of:
7313
7314 obj = serializers.cast_keys(dict, [encoding="utf-8"])
7315
7316 or else (for parsing json input)
7317
7318 obj = serializers.loads_json(data, unicode_keys=False)
7319
7320 :pool_size: How many open connections to make to the database object.
7321 :folder: where .table files will be created.
7322 automatically set within web2py
7323 use an explicit path when using DAL outside web2py
7324 :db_codec: string encoding of the database (default: 'UTF-8')
7325 :check_reserved: list of adapters to check tablenames and column names
7326 against sql/nosql reserved keywords. (Default None)
7327
7328 * 'common' List of SQL keywords that are common to all database types
7329 such as "SELECT, INSERT". (recommended)
7330 * 'all' Checks against all known SQL keywords. (not recommended)
7331 * '<adaptername>' Checks against the specific adapter's list of keywords
7332 (recommended)
7333 * '<adaptername>_nonreserved' Checks against the specific adapter's
7334 list of nonreserved keywords. (if available)
7335 :migrate: (defaults to True) sets default migrate behavior for all tables
7336 :fake_migrate: (defaults to False) sets default fake_migrate behavior for all tables
7337 :migrate_enabled: (defaults to True). If set to False disables ALL migrations
7338 :fake_migrate_all: (defaults to False). If set to True fake migrates ALL tables
7339 :attempts: (defaults to 5). Number of times to attempt connecting
7340 :auto_import: (defaults to False). If set, import table definitions automatically from the
7341 databases folder
7342 :bigint_id: (defaults to False): If set, turn on bigint instead of int for id fields
7343 :lazy_tables: (defaults to False): delay table definition until table access
7344 :after_connection: (defaults to None): a callable that will be executed after the connection
7345 """
7346
7347 items = None
7348 if isinstance(uri, dict):
7349 if "items" in uri:
7350 items = uri.pop("items")
7351 try:
7352 newuri = uri.pop("uri")
7353 except KeyError:
7354 newuri = DEFAULT_URI
7355 locals().update(uri)
7356 uri = newuri
7357
7358 if uri == '<zombie>' and db_uid is not None: return
7359 if not decode_credentials:
7360 credential_decoder = lambda cred: cred
7361 else:
7362 credential_decoder = lambda cred: urllib.unquote(cred)
7363 self._folder = folder
7364 if folder:
7365 self.set_folder(folder)
7366 self._uri = uri
7367 self._pool_size = pool_size
7368 self._db_codec = db_codec
7369 self._lastsql = ''
7370 self._timings = []
7371 self._pending_references = {}
7372 self._request_tenant = 'request_tenant'
7373 self._common_fields = []
7374 self._referee_name = '%(table)s'
7375 self._bigint_id = bigint_id
7376 self._debug = debug
7377 self._migrated = []
7378 self._LAZY_TABLES = {}
7379 self._lazy_tables = lazy_tables
7380 self._tables = SQLCallableList()
7381 self._driver_args = driver_args
7382 self._adapter_args = adapter_args
7383 self._check_reserved = check_reserved
7384 self._decode_credentials = decode_credentials
7385 self._attempts = attempts
7386 self._do_connect = do_connect
7387
7388 if not str(attempts).isdigit() or attempts < 0:
7389 attempts = 5
7390 if uri:
7391 uris = isinstance(uri,(list,tuple)) and uri or [uri]
7392 error = ''
7393 connected = False
7394 for k in range(attempts):
7395 for uri in uris:
7396 try:
7397 if is_jdbc and not uri.startswith('jdbc:'):
7398 uri = 'jdbc:'+uri
7399 self._dbname = REGEX_DBNAME.match(uri).group()
7400 if not self._dbname in ADAPTERS:
7401 raise SyntaxError("Error in URI '%s' or database not supported" % self._dbname)
7402
7403
7404 kwargs = dict(db=self,uri=uri,
7405 pool_size=pool_size,
7406 folder=folder,
7407 db_codec=db_codec,
7408 credential_decoder=credential_decoder,
7409 driver_args=driver_args or {},
7410 adapter_args=adapter_args or {},
7411 do_connect=do_connect,
7412 after_connection=after_connection)
7413 self._adapter = ADAPTERS[self._dbname](**kwargs)
7414 types = ADAPTERS[self._dbname].types
7415
7416 self._adapter.types = copy.copy(types)
7417 if bigint_id:
7418 if 'big-id' in types and 'reference' in types:
7419 self._adapter.types['id'] = types['big-id']
7420 self._adapter.types['reference'] = types['big-reference']
7421 connected = True
7422 break
7423 except SyntaxError:
7424 raise
7425 except Exception:
7426 tb = traceback.format_exc()
7427 sys.stderr.write('DEBUG: connect attempt %i, connection error:\n%s' % (k, tb))
7428 if connected:
7429 break
7430 else:
7431 time.sleep(1)
7432 if not connected:
7433 raise RuntimeError("Failure to connect, tried %d times:\n%s" % (attempts, tb))
7434 else:
7435 self._adapter = BaseAdapter(db=self,pool_size=0,
7436 uri='None',folder=folder,
7437 db_codec=db_codec, after_connection=after_connection)
7438 migrate = fake_migrate = False
7439 adapter = self._adapter
7440 self._uri_hash = hashlib_md5(adapter.uri).hexdigest()
7441 self.check_reserved = check_reserved
7442 if self.check_reserved:
7443 from reserved_sql_keywords import ADAPTERS as RSK
7444 self.RSK = RSK
7445 self._migrate = migrate
7446 self._fake_migrate = fake_migrate
7447 self._migrate_enabled = migrate_enabled
7448 self._fake_migrate_all = fake_migrate_all
7449 if auto_import or items:
7450 self.import_table_definitions(adapter.folder,
7451 items=items)
7452
7453 @property
7454 def tables(self):
7455 return self._tables
7456
7457 def import_table_definitions(self, path, migrate=False,
7458 fake_migrate=False, items=None):
7459 pattern = pjoin(path,self._uri_hash+'_*.table')
7460 if items:
7461 for tablename, table in items.iteritems():
7462
7463 fields = []
7464
7465 [table.pop(name) for name in ("name", "fields") if
7466 name in table]
7467 if "items" in table:
7468 for fieldname, field in table.pop("items").iteritems():
7469
7470 [field.pop(key) for key in ("requires", "name",
7471 "compute", "colname") if key in field]
7472 fields.append(Field(str(fieldname), **field))
7473 self.define_table(str(tablename), *fields, **table)
7474 else:
7475 for filename in glob.glob(pattern):
7476 tfile = self._adapter.file_open(filename, 'r')
7477 try:
7478 sql_fields = pickle.load(tfile)
7479 name = filename[len(pattern)-7:-6]
7480 mf = [(value['sortable'],
7481 Field(key,
7482 type=value['type'],
7483 length=value.get('length',None),
7484 notnull=value.get('notnull',False),
7485 unique=value.get('unique',False))) \
7486 for key, value in sql_fields.iteritems()]
7487 mf.sort(lambda a,b: cmp(a[0],b[0]))
7488 self.define_table(name,*[item[1] for item in mf],
7489 **dict(migrate=migrate,
7490 fake_migrate=fake_migrate))
7491 finally:
7492 self._adapter.file_close(tfile)
7493
7494 def check_reserved_keyword(self, name):
7495 """
7496 Validates ``name`` against SQL keywords.
7497 Uses self.check_reserved, which is a list of
7498 backends to check against, e.g.:
7499 self.check_reserved
7500 ['common', 'postgres', 'mysql']
7501 self.check_reserved
7502 ['all']
7503 """
7504 for backend in self.check_reserved:
7505 if name.upper() in self.RSK[backend]:
7506 raise SyntaxError(
7507 'invalid table/column name "%s" is a "%s" reserved SQL/NOSQL keyword' % (name, backend.upper()))
7508
7509 def parse_as_rest(self,patterns,args,vars,queries=None,nested_select=True):
7510 """
7511 EXAMPLE:
7512
7513 db.define_table('person',Field('name'),Field('info'))
7514 db.define_table('pet',Field('ownedby',db.person),Field('name'),Field('info'))
7515
7516 @request.restful()
7517 def index():
7518 def GET(*args,**vars):
7519 patterns = [
7520 "/friends[person]",
7521 "/{person.name}/:field",
7522 "/{person.name}/pets[pet.ownedby]",
7523 "/{person.name}/pets[pet.ownedby]/{pet.name}",
7524 "/{person.name}/pets[pet.ownedby]/{pet.name}/:field",
7525 ("/dogs[pet]", db.pet.info=='dog'),
7526 ("/dogs[pet]/{pet.name.startswith}", db.pet.info=='dog'),
7527 ]
7528 parser = db.parse_as_rest(patterns,args,vars)
7529 if parser.status == 200:
7530 return dict(content=parser.response)
7531 else:
7532 raise HTTP(parser.status,parser.error)
7533
7534 def POST(table_name,**vars):
7535 if table_name == 'person':
7536 return db.person.validate_and_insert(**vars)
7537 elif table_name == 'pet':
7538 return db.pet.validate_and_insert(**vars)
7539 else:
7540 raise HTTP(400)
7541 return locals()
7542 """
7543
7544 db = self
7545 re1 = REGEX_SEARCH_PATTERN
7546 re2 = REGEX_SQUARE_BRACKETS
7547
7548 def auto_table(table,base='',depth=0):
7549 patterns = []
7550 for field in db[table].fields:
7551 if base:
7552 tag = '%s/%s' % (base,field.replace('_','-'))
7553 else:
7554 tag = '/%s/%s' % (table.replace('_','-'),field.replace('_','-'))
7555 f = db[table][field]
7556 if not f.readable: continue
7557 if f.type=='id' or 'slug' in field or f.type.startswith('reference'):
7558 tag += '/{%s.%s}' % (table,field)
7559 patterns.append(tag)
7560 patterns.append(tag+'/:field')
7561 elif f.type.startswith('boolean'):
7562 tag += '/{%s.%s}' % (table,field)
7563 patterns.append(tag)
7564 patterns.append(tag+'/:field')
7565 elif f.type in ('float','double','integer','bigint'):
7566 tag += '/{%s.%s.ge}/{%s.%s.lt}' % (table,field,table,field)
7567 patterns.append(tag)
7568 patterns.append(tag+'/:field')
7569 elif f.type.startswith('list:'):
7570 tag += '/{%s.%s.contains}' % (table,field)
7571 patterns.append(tag)
7572 patterns.append(tag+'/:field')
7573 elif f.type in ('date','datetime'):
7574 tag+= '/{%s.%s.year}' % (table,field)
7575 patterns.append(tag)
7576 patterns.append(tag+'/:field')
7577 tag+='/{%s.%s.month}' % (table,field)
7578 patterns.append(tag)
7579 patterns.append(tag+'/:field')
7580 tag+='/{%s.%s.day}' % (table,field)
7581 patterns.append(tag)
7582 patterns.append(tag+'/:field')
7583 if f.type in ('datetime','time'):
7584 tag+= '/{%s.%s.hour}' % (table,field)
7585 patterns.append(tag)
7586 patterns.append(tag+'/:field')
7587 tag+='/{%s.%s.minute}' % (table,field)
7588 patterns.append(tag)
7589 patterns.append(tag+'/:field')
7590 tag+='/{%s.%s.second}' % (table,field)
7591 patterns.append(tag)
7592 patterns.append(tag+'/:field')
7593 if depth>0:
7594 for f in db[table]._referenced_by:
7595 tag+='/%s[%s.%s]' % (table,f.tablename,f.name)
7596 patterns.append(tag)
7597 patterns += auto_table(table,base=tag,depth=depth-1)
7598 return patterns
7599
7600 if patterns == 'auto':
7601 patterns=[]
7602 for table in db.tables:
7603 if not table.startswith('auth_'):
7604 patterns.append('/%s[%s]' % (table,table))
7605 patterns += auto_table(table,base='',depth=1)
7606 else:
7607 i = 0
7608 while i<len(patterns):
7609 pattern = patterns[i]
7610 if not isinstance(pattern,str):
7611 pattern = pattern[0]
7612 tokens = pattern.split('/')
7613 if tokens[-1].startswith(':auto') and re2.match(tokens[-1]):
7614 new_patterns = auto_table(tokens[-1][tokens[-1].find('[')+1:-1],
7615 '/'.join(tokens[:-1]))
7616 patterns = patterns[:i]+new_patterns+patterns[i+1:]
7617 i += len(new_patterns)
7618 else:
7619 i += 1
7620 if '/'.join(args) == 'patterns':
7621 return Row({'status':200,'pattern':'list',
7622 'error':None,'response':patterns})
7623 for pattern in patterns:
7624 basequery, exposedfields = None, []
7625 if isinstance(pattern,tuple):
7626 if len(pattern)==2:
7627 pattern, basequery = pattern
7628 elif len(pattern)>2:
7629 pattern, basequery, exposedfields = pattern[0:3]
7630 otable=table=None
7631 if not isinstance(queries,dict):
7632 dbset=db(queries)
7633 if basequery is not None:
7634 dbset = dbset(basequery)
7635 i=0
7636 tags = pattern[1:].split('/')
7637 if len(tags)!=len(args):
7638 continue
7639 for tag in tags:
7640 if re1.match(tag):
7641
7642 tokens = tag[1:-1].split('.')
7643 table, field = tokens[0], tokens[1]
7644 if not otable or table == otable:
7645 if len(tokens)==2 or tokens[2]=='eq':
7646 query = db[table][field]==args[i]
7647 elif tokens[2]=='ne':
7648 query = db[table][field]!=args[i]
7649 elif tokens[2]=='lt':
7650 query = db[table][field]<args[i]
7651 elif tokens[2]=='gt':
7652 query = db[table][field]>args[i]
7653 elif tokens[2]=='ge':
7654 query = db[table][field]>=args[i]
7655 elif tokens[2]=='le':
7656 query = db[table][field]<=args[i]
7657 elif tokens[2]=='year':
7658 query = db[table][field].year()==args[i]
7659 elif tokens[2]=='month':
7660 query = db[table][field].month()==args[i]
7661 elif tokens[2]=='day':
7662 query = db[table][field].day()==args[i]
7663 elif tokens[2]=='hour':
7664 query = db[table][field].hour()==args[i]
7665 elif tokens[2]=='minute':
7666 query = db[table][field].minutes()==args[i]
7667 elif tokens[2]=='second':
7668 query = db[table][field].seconds()==args[i]
7669 elif tokens[2]=='startswith':
7670 query = db[table][field].startswith(args[i])
7671 elif tokens[2]=='contains':
7672 query = db[table][field].contains(args[i])
7673 else:
7674 raise RuntimeError("invalid pattern: %s" % pattern)
7675 if len(tokens)==4 and tokens[3]=='not':
7676 query = ~query
7677 elif len(tokens)>=4:
7678 raise RuntimeError("invalid pattern: %s" % pattern)
7679 if not otable and isinstance(queries,dict):
7680 dbset = db(queries[table])
7681 if basequery is not None:
7682 dbset = dbset(basequery)
7683 dbset=dbset(query)
7684 else:
7685 raise RuntimeError("missing relation in pattern: %s" % pattern)
7686 elif re2.match(tag) and args[i]==tag[:tag.find('[')]:
7687 ref = tag[tag.find('[')+1:-1]
7688 if '.' in ref and otable:
7689 table,field = ref.split('.')
7690 selfld = '_id'
7691 if db[table][field].type.startswith('reference '):
7692 refs = [ x.name for x in db[otable] if x.type == db[table][field].type ]
7693 else:
7694 refs = [ x.name for x in db[table]._referenced_by if x.tablename==otable ]
7695 if refs:
7696 selfld = refs[0]
7697 if nested_select:
7698 try:
7699 dbset=db(db[table][field].belongs(dbset._select(db[otable][selfld])))
7700 except ValueError:
7701 return Row({'status':400,'pattern':pattern,
7702 'error':'invalid path','response':None})
7703 else:
7704 items = [item.id for item in dbset.select(db[otable][selfld])]
7705 dbset=db(db[table][field].belongs(items))
7706 else:
7707 table = ref
7708 if not otable and isinstance(queries,dict):
7709 dbset = db(queries[table])
7710 dbset=dbset(db[table])
7711 elif tag==':field' and table:
7712
7713 field = args[i]
7714 if not field in db[table]: break
7715
7716 if not db[table][field].readable:
7717 return Row({'status':418,'pattern':pattern,
7718 'error':'I\'m a teapot','response':None})
7719 try:
7720 distinct = vars.get('distinct', False) == 'True'
7721 offset = long(vars.get('offset',None) or 0)
7722 limits = (offset,long(vars.get('limit',None) or 1000)+offset)
7723 except ValueError:
7724 return Row({'status':400,'error':'invalid limits','response':None})
7725 items = dbset.select(db[table][field], distinct=distinct, limitby=limits)
7726 if items:
7727 return Row({'status':200,'response':items,
7728 'pattern':pattern})
7729 else:
7730 return Row({'status':404,'pattern':pattern,
7731 'error':'no record found','response':None})
7732 elif tag != args[i]:
7733 break
7734 otable = table
7735 i += 1
7736 if i==len(tags) and table:
7737 ofields = vars.get('order',db[table]._id.name).split('|')
7738 try:
7739 orderby = [db[table][f] if not f.startswith('~') else ~db[table][f[1:]] for f in ofields]
7740 except (KeyError, AttributeError):
7741 return Row({'status':400,'error':'invalid orderby','response':None})
7742 if exposedfields:
7743 fields = [field for field in db[table] if str(field).split('.')[-1] in exposedfields and field.readable]
7744 else:
7745 fields = [field for field in db[table] if field.readable]
7746 count = dbset.count()
7747 try:
7748 offset = long(vars.get('offset',None) or 0)
7749 limits = (offset,long(vars.get('limit',None) or 1000)+offset)
7750 except ValueError:
7751 return Row({'status':400,'error':'invalid limits','response':None})
7752 if count > limits[1]-limits[0]:
7753 return Row({'status':400,'error':'too many records','response':None})
7754 try:
7755 response = dbset.select(limitby=limits,orderby=orderby,*fields)
7756 except ValueError:
7757 return Row({'status':400,'pattern':pattern,
7758 'error':'invalid path','response':None})
7759 return Row({'status':200,'response':response,
7760 'pattern':pattern,'count':count})
7761 return Row({'status':400,'error':'no matching pattern','response':None})
7762
7763 def define_table(
7764 self,
7765 tablename,
7766 *fields,
7767 **args
7768 ):
7769 if not isinstance(tablename,str):
7770 raise SyntaxError("missing table name")
7771 elif hasattr(self,tablename) or tablename in self.tables:
7772 if not args.get('redefine',False):
7773 raise SyntaxError('table already defined: %s' % tablename)
7774 elif tablename.startswith('_') or hasattr(self,tablename) or \
7775 REGEX_PYTHON_KEYWORDS.match(tablename):
7776 raise SyntaxError('invalid table name: %s' % tablename)
7777 elif self.check_reserved:
7778 self.check_reserved_keyword(tablename)
7779 else:
7780 invalid_args = set(args)-TABLE_ARGS
7781 if invalid_args:
7782 raise SyntaxError('invalid table "%s" attributes: %s' \
7783 % (tablename,invalid_args))
7784 if self._lazy_tables and not tablename in self._LAZY_TABLES:
7785 self._LAZY_TABLES[tablename] = (tablename,fields,args)
7786 table = None
7787 else:
7788 table = self.lazy_define_table(tablename,*fields,**args)
7789 if not tablename in self.tables:
7790 self.tables.append(tablename)
7791 return table
7792
7793 def lazy_define_table(
7794 self,
7795 tablename,
7796 *fields,
7797 **args
7798 ):
7799 args_get = args.get
7800 common_fields = self._common_fields
7801 if common_fields:
7802 fields = list(fields) + list(common_fields)
7803
7804 table_class = args_get('table_class',Table)
7805 table = table_class(self, tablename, *fields, **args)
7806 table._actual = True
7807 self[tablename] = table
7808
7809 table._create_references()
7810 for field in table:
7811 if field.requires == DEFAULT:
7812 field.requires = sqlhtml_validators(field)
7813
7814 migrate = self._migrate_enabled and args_get('migrate',self._migrate)
7815 if migrate and not self._uri in (None,'None') \
7816 or self._adapter.dbengine=='google:datastore':
7817 fake_migrate = self._fake_migrate_all or \
7818 args_get('fake_migrate',self._fake_migrate)
7819 polymodel = args_get('polymodel',None)
7820 try:
7821 GLOBAL_LOCKER.acquire()
7822 self._lastsql = self._adapter.create_table(
7823 table,migrate=migrate,
7824 fake_migrate=fake_migrate,
7825 polymodel=polymodel)
7826 finally:
7827 GLOBAL_LOCKER.release()
7828 else:
7829 table._dbt = None
7830 on_define = args_get('on_define',None)
7831 if on_define: on_define(table)
7832 return table
7833
7834 def as_dict(self, flat=False, sanitize=True, field_options=True):
7835 dbname = db_uid = uri = None
7836 if not sanitize:
7837 uri, dbname, db_uid = (self._uri, self._dbname, self._db_uid)
7838 db_as_dict = dict(items={}, tables=[], uri=uri, dbname=dbname,
7839 db_uid=db_uid,
7840 **dict([(k, getattr(self, "_" + k)) for
7841 k in 'pool_size','folder','db_codec',
7842 'check_reserved','migrate','fake_migrate',
7843 'migrate_enabled','fake_migrate_all',
7844 'decode_credentials','driver_args',
7845 'adapter_args', 'attempts',
7846 'bigint_id','debug','lazy_tables',
7847 'do_connect']))
7848
7849 for table in self:
7850 tablename = str(table)
7851 db_as_dict["tables"].append(tablename)
7852 db_as_dict["items"][tablename] = table.as_dict(flat=flat,
7853 sanitize=sanitize,
7854 field_options=field_options)
7855 return db_as_dict
7856
7857 def as_xml(self, sanitize=True, field_options=True):
7863
7864 def as_json(self, sanitize=True, field_options=True):
7870
7871 def as_yaml(self, sanitize=True, field_options=True):
7877
7878 def __contains__(self, tablename):
7879 try:
7880 return tablename in self.tables
7881 except AttributeError:
7882
7883 return False
7884
7885 has_key = __contains__
7886
7887 def get(self, key, default=None):
7888 return self.__dict__.get(key,default)
7889
7890 def __iter__(self):
7891 for tablename in self.tables:
7892 yield self[tablename]
7893
7894 def __getitem__(self, key):
7895 return self.__getattr__(str(key))
7896
7897 def __getattr__(self, key):
7898 if ogetattr(self,'_lazy_tables') and \
7899 key in ogetattr(self,'_LAZY_TABLES'):
7900 tablename, fields, args = self._LAZY_TABLES.pop(key)
7901 return self.lazy_define_table(tablename,*fields,**args)
7902 return ogetattr(self, key)
7903
7904 def __setitem__(self, key, value):
7905 osetattr(self, str(key), value)
7906
7907 def __setattr__(self, key, value):
7908 if key[:1]!='_' and key in self:
7909 raise SyntaxError(
7910 'Object %s exists and cannot be redefined' % key)
7911 osetattr(self,key,value)
7912
7913 __delitem__ = object.__delattr__
7914
7915 def __repr__(self):
7916 if hasattr(self,'_uri'):
7917 return '<DAL uri="%s">' % hide_password(str(self._uri))
7918 else:
7919 return '<DAL db_uid="%s">' % self._db_uid
7920
7921 def smart_query(self, fields, text):
7922 return Set(self, smart_query(fields, text))
7923
7924 def __call__(self, query=None, ignore_common_filters=None):
7925 if isinstance(query,Table):
7926 query = self._adapter.id_query(query)
7927 elif isinstance(query,Field):
7928 query = query!=None
7929 elif isinstance(query, dict):
7930 icf = query.get("ignore_common_filters")
7931 if icf: ignore_common_filters = icf
7932 return Set(self, query, ignore_common_filters=ignore_common_filters)
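
# e.g. (table name hypothetical): db(db.person.id > 0) wraps the query in
# a Set, db(db.person) selects the whole table via id_query, and db()
# returns an unfiltered Set.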
7933
7934 def commit(self):
7935 self._adapter.commit()
7936
7937 def rollback(self):
7938 self._adapter.rollback()
7939
7940 def close(self):
7941 self._adapter.close()
7942 if self._db_uid in THREAD_LOCAL.db_instances:
7943 db_group = THREAD_LOCAL.db_instances[self._db_uid]
7944 db_group.remove(self)
7945 if not db_group:
7946 del THREAD_LOCAL.db_instances[self._db_uid]
7947
    def executesql(self, query, placeholders=None, as_dict=False,
                   fields=None, colnames=None):
        """
        placeholders is optional and defaults to None; when using the DAL
        it should remain None. With raw SQL, placeholders may be a sequence
        of values to be substituted in, or (if supported by the DB driver)
        a dictionary with keys matching named placeholders in your SQL.

        The optional "as_dict" argument (added 2009-12-05) should likewise
        remain False when using the DAL. With raw SQL it can be set to True,
        in which case the results cursor returned by the DB driver is
        converted to a sequence of dictionaries keyed with the db field
        names. Tested with SQLite, but it should work with any database
        since cursor.description, used to get the field names, is part of
        the Python DB-API 2.0 spec. Results returned with as_dict=True are
        the same as those returned when applying .to_list() to a DAL query:

        [{field1: value1, field2: value2}, {field1: value1b, field2: value2b}]

        Added 2012-08-24: "fields" and "colnames" optional arguments. If either
        is provided, the results cursor returned by the DB driver will be
        converted to a DAL Rows object using the db._adapter.parse() method.

        The "fields" argument is a list of DAL Field objects that match the
        fields returned from the DB. The Field objects should be part of one or
        more Table objects defined on the DAL object. The "fields" list can
        include one or more DAL Table objects in addition to or instead of
        including Field objects, or it can be just a single table (not in a
        list). In that case, the Field objects will be extracted from the
        table(s).

        Instead of specifying the "fields" argument, the "colnames" argument
        can be specified as a list of field names in tablename.fieldname format.
        Again, these should represent tables and fields defined on the DAL
        object.

        It is also possible to specify both "fields" and the associated
        "colnames". In that case, "fields" can also include DAL Expression
        objects in addition to Field objects. For Field objects in "fields",
        the associated "colnames" must still be in tablename.fieldname format.
        For Expression objects in "fields", the associated "colnames" can
        be any arbitrary labels.

        Note, the DAL Table objects referred to by "fields" or "colnames" can
        be dummy tables and do not have to represent any real tables in the
        database. Also, note that the "fields" and "colnames" must be in the
        same order as the fields in the results cursor returned from the DB.
        """
        adapter = self._adapter
        if placeholders:
            adapter.execute(query, placeholders)
        else:
            adapter.execute(query)
        if as_dict:
            if not hasattr(adapter.cursor,'description'):
                raise RuntimeError("database does not support executesql(...,as_dict=True)")
            # cursor.description is a sequence of 7-item sequences;
            # the first item of each is the column name (DB-API 2.0)
            columns = adapter.cursor.description
            # reduce the column info down to just the field names
            fields = [f[0] for f in columns]
            data = adapter._fetchall()
            # convert the rows into dictionaries so that
            # row['field_name'] works rather than row[0]
            return [dict(zip(fields,row)) for row in data]
        try:
            data = adapter._fetchall()
        except:
            return None
        if fields or colnames:
            fields = [] if fields is None else fields
            if not isinstance(fields, list):
                fields = [fields]
            extracted_fields = []
            for field in fields:
                if isinstance(field, Table):
                    extracted_fields.extend([f for f in field])
                else:
                    extracted_fields.append(field)
            if not colnames:
                colnames = ['%s.%s' % (f.tablename, f.name)
                            for f in extracted_fields]
            data = adapter.parse(
                data, fields=extracted_fields, colnames=colnames)
        return data
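
    # Usage sketch for executesql (illustrative; assumes the `person` table
    # from the module docstring). With as_dict=True the raw cursor rows come
    # back as plain dictionaries; with `fields` they are parsed into a Rows
    # object:
    #
    #     >>> db.executesql("SELECT id, name FROM person;", as_dict=True)
    #     >>> rows = db.executesql("SELECT id, name FROM person;",
    #     ...                      fields=[db.person.id, db.person.name])
    #     >>> rows.first().name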

    def _remove_references_to(self, thistable):
        for table in self:
            table._referenced_by = [field for field in table._referenced_by
                                    if not field.table==thistable]

    def export_to_csv_file(self, ofile, *args, **kwargs):
        step = long(kwargs.get('max_fetch_rows',500))
        write_colnames = kwargs['write_colnames'] = \
            kwargs.get("write_colnames", True)
        for table in self.tables:
            ofile.write('TABLE %s\r\n' % table)
            query = self._adapter.id_query(self[table])
            nrows = self(query).count()
            kwargs['write_colnames'] = write_colnames
            for k in range(0,nrows,step):
                self(query).select(limitby=(k,k+step)).export_to_csv_file(
                    ofile, *args, **kwargs)
                kwargs['write_colnames'] = False
            ofile.write('\r\n\r\n')
        ofile.write('END')

    def import_from_csv_file(self, ifile, id_map=None, null='<NULL>',
                             unique='uuid', *args, **kwargs):
        id_offset = {} # only used if id_map is None
        for line in ifile:
            line = line.strip()
            if not line:
                continue
            elif line == 'END':
                return
            elif not line.startswith('TABLE ') or not line[6:] in self.tables:
                raise SyntaxError('invalid file format')
            else:
                tablename = line[6:]
                self[tablename].import_from_csv_file(
                    ifile, id_map, null, unique, id_offset, *args, **kwargs)
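
    # Round-trip sketch (illustrative; 'backup.csv' is a hypothetical path):
    # export_to_csv_file writes one 'TABLE <name>' block per table, ending
    # with 'END', which is exactly the format import_from_csv_file expects:
    #
    #     >>> db.export_to_csv_file(open('backup.csv', 'wb'))
    #     >>> # later, on a freshly defined db with the same tables:
    #     >>> db.import_from_csv_file(open('backup.csv', 'rb'))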

def DAL_unpickler(db_uid):
    return DAL('<zombie>',db_uid=db_uid)

def DAL_pickler(db):
    return DAL_unpickler, (db._db_uid,)

copyreg.pickle(DAL, DAL_pickler, DAL_unpickler)

class SQLALL(object):
    """
    Helper class providing a comma-separated string having all the field names
    (prefixed by table name and '.')

    normally only called from within gluon.sql
    """

    def __init__(self, table):
        self._table = table

    def __str__(self):
        return ', '.join([str(field) for field in self._table])

class Reference(long):

    def __allocate(self):
        if not self._record:
            self._record = self._table[long(self)]
        if not self._record:
            raise RuntimeError(
                "Using a recursive select but encountered a broken reference: %s %d" % (self._table, long(self)))

    def __getattr__(self, key):
        if key == 'id':
            return long(self)
        self.__allocate()
        return self._record.get(key, None)

    def get(self, key, default=None):
        return self.__getattr__(key)

    def __getitem__(self, key):
        if key == 'id':
            return long(self)
        self.__allocate()
        return self._record.get(key, None)

    def __setitem__(self, key, value):
        self.__allocate()
        self._record[key] = value


def Reference_unpickler(data):
    return marshal.loads(data)

def Reference_pickler(data):
    try:
        marshal_dump = marshal.dumps(long(data))
    except AttributeError:
        marshal_dump = 'i%s' % struct.pack('<i', long(data))
    return (Reference_unpickler, (marshal_dump,))

copyreg.pickle(Reference, Reference_pickler, Reference_unpickler)

class MethodAdder(object):
    def __init__(self, table):
        self.table = table
    def __call__(self):
        return self.register()
    def __getattr__(self, method_name):
        return self.register(method_name)
    def register(self, method_name=None):
        def _decorated(f):
            instance = self.table
            import types
            method = types.MethodType(f, instance, instance.__class__)
            name = method_name or f.func_name
            setattr(instance, name, method)
            return f
        return _decorated


class Table(object):

    """
    an instance of this class represents a database table

    Example::

        db = DAL(...)
        db.define_table('users', Field('name'))
        db.users.insert(name='me') # print db.users._insert(...) to see SQL
        db.users.drop()
    """

    def __init__(
        self,
        db,
        tablename,
        *fields,
        **args
        ):
        """
        Initializes the table and performs checking on the provided fields.

        Each table will automatically have an 'id' field.

        If a field is of type Table, the fields (excluding 'id') from that table
        will be used instead.

        :raises SyntaxError: when a supplied field is of incorrect type.
        """
        self._actual = False # set to True by define_table()
        self._tablename = tablename
        self._ot = args.get('actual_name')
        self._sequence_name = args.get('sequence_name') or \
            db and db._adapter.sequence_name(tablename)
        self._trigger_name = args.get('trigger_name') or \
            db and db._adapter.trigger_name(tablename)
        self._common_filter = args.get('common_filter')
        self._format = args.get('format')
        self._singular = args.get(
            'singular',tablename.replace('_',' ').capitalize())
        self._plural = args.get(
            'plural',pluralize(self._singular.lower()).capitalize())

        if 'primarykey' in args and args['primarykey'] is not None:
            self._primarykey = args.get('primarykey')

        self._before_insert = []
        self._before_update = [Set.delete_uploaded_files]
        self._before_delete = [Set.delete_uploaded_files]
        self._after_insert = []
        self._after_update = []
        self._after_delete = []

        self.add_method = MethodAdder(self)

        fieldnames,newfields=set(),[]
        if hasattr(self,'_primarykey'):
            if not isinstance(self._primarykey,list):
                raise SyntaxError(
                    "primarykey must be a list of fields from table '%s'" \
                    % tablename)
            if len(self._primarykey)==1:
                self._id = [f for f in fields if isinstance(f,Field) \
                                and f.name==self._primarykey[0]][0]
        elif not [f for f in fields if isinstance(f,Field) and f.type=='id']:
            field = Field('id', 'id')
            newfields.append(field)
            fieldnames.add('id')
            self._id = field
        virtual_fields = []
        for field in fields:
            if isinstance(field, (FieldMethod, FieldVirtual)):
                virtual_fields.append(field)
            elif isinstance(field, Field) and not field.name in fieldnames:
                if field.db is not None:
                    field = copy.copy(field)
                newfields.append(field)
                fieldnames.add(field.name)
                if field.type=='id':
                    self._id = field
            elif isinstance(field, Table):
                table = field
                for field in table:
                    if not field.name in fieldnames and not field.type=='id':
                        t2 = not table._actual and self._tablename
                        field = field.clone(point_self_references_to=t2)
                        newfields.append(field)
                        fieldnames.add(field.name)
            elif not isinstance(field, (Field, Table)):
                raise SyntaxError(
                    'define_table argument is not a Field or Table: %s' % field)
        fields = newfields
        self._db = db
        self._fields = SQLCallableList()
        self.virtualfields = []
        fields = list(fields)

        if db and db._adapter.uploads_in_blob==True:
            uploadfields = [f.name for f in fields if f.type=='blob']
            for field in fields:
                fn = field.uploadfield
                if isinstance(field, Field) and field.type == 'upload'\
                        and fn is True:
                    fn = field.uploadfield = '%s_blob' % field.name
                if isinstance(fn,str) and not fn in uploadfields:
                    fields.append(Field(fn,'blob',default='',
                                        writable=False,readable=False))

        lower_fieldnames = set()
        reserved = dir(Table) + ['fields']
        for field in fields:
            field_name = field.name
            if db and db.check_reserved:
                db.check_reserved_keyword(field_name)
            elif field_name in reserved:
                raise SyntaxError("field name %s not allowed" % field_name)

            if field_name.lower() in lower_fieldnames:
                raise SyntaxError("duplicate field %s in table %s" \
                                  % (field_name, tablename))
            else:
                lower_fieldnames.add(field_name.lower())

            self.fields.append(field_name)
            self[field_name] = field
            if field.type == 'id':
                self['id'] = field
            field.tablename = field._tablename = tablename
            field.table = field._table = self
            field.db = field._db = db
            if db and not field.type in ('text', 'blob', 'json') and \
                    db._adapter.maxcharlength < field.length:
                field.length = db._adapter.maxcharlength
        self.ALL = SQLALL(self)

        if hasattr(self,'_primarykey'):
            for k in self._primarykey:
                if k not in self.fields:
                    raise SyntaxError(
                        "primarykey must be a list of fields from table '%s'" \
                        % tablename)
                else:
                    self[k].notnull = True
        for field in virtual_fields:
            self[field.name] = field

    @property
    def fields(self):
        return self._fields

    def update(self,*args,**kwargs):
        raise RuntimeError("Syntax Not Supported")

    def _enable_record_versioning(self,
                                  archive_db=None,
                                  archive_name='%(tablename)s_archive',
                                  current_record='current_record',
                                  is_active='is_active'):
        archive_db = archive_db or self._db
        archive_name = archive_name % dict(tablename=self._tablename)
        if archive_name in archive_db.tables():
            return # the archive table already exists; do not redefine it
        fieldnames = self.fields()
        field_type = self if archive_db is self._db else 'bigint'
        archive_db.define_table(
            archive_name,
            Field(current_record,field_type),
            *[field.clone(unique=False) for field in self])
        self._before_update.append(
            lambda qset,fs,db=archive_db,an=archive_name,cn=current_record:
                archive_record(qset,fs,db[an],cn))
        if is_active and is_active in fieldnames:
            self._before_delete.append(
                lambda qset: qset.update(is_active=False))
            newquery = lambda query, t=self: t.is_active == True
            query = self._common_filter
            if query:
                newquery = query & newquery
            self._common_filter = newquery

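    # Usage sketch (illustrative; assumes a `person` table). This defines a
    # `person_archive` table, copies the old version of each row there on
    # every update and, if the table has an `is_active` field, turns deletes
    # into soft-deletes filtered out by the common filter:
    #
    #     >>> db.person._enable_record_versioning()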

    def _create_references(self):
        db = self._db
        pr = db._pending_references
        self._referenced_by = []
        for field in self:
            fieldname = field.name
            field_type = field.type
            if isinstance(field_type,str) and field_type[:10] == 'reference ':
                ref = field_type[10:].strip()
                if not ref.split():
                    raise SyntaxError('Table: reference to nothing: %s' % ref)
                refs = ref.split('.')
                rtablename = refs[0]
                if not rtablename in db:
                    pr[rtablename] = pr.get(rtablename,[]) + [field]
                    continue
                rtable = db[rtablename]
                if len(refs)==2:
                    rfieldname = refs[1]
                    if not hasattr(rtable,'_primarykey'):
                        raise SyntaxError(
                            'keyed tables can only reference other keyed tables (for now)')
                    if rfieldname not in rtable.fields:
                        raise SyntaxError(
                            "invalid field '%s' for referenced table '%s' in table '%s'" \
                            % (rfieldname, rtablename, self._tablename))
                rtable._referenced_by.append(field)
        for referee in pr.get(self._tablename,[]):
            self._referenced_by.append(referee)

    def _filter_fields(self, record, id=False):
        return dict([(k, v) for (k, v) in record.iteritems() if k
                     in self.fields and (self[k].type!='id' or id)])

    def _build_query(self, key):
        """ for keyed table only """
        query = None
        for k,v in key.iteritems():
            if k in self._primarykey:
                if query:
                    query = query & (self[k] == v)
                else:
                    query = (self[k] == v)
            else:
                raise SyntaxError(
                    'Field %s is not part of the primary key of %s' % \
                    (k,self._tablename))
        return query

    def __getitem__(self, key):
        if not key:
            return None
        elif isinstance(key, dict):
            # for keyed table
            query = self._build_query(key)
            rows = self._db(query).select()
            if rows:
                return rows[0]
            return None
        elif str(key).isdigit() or 'google' in DRIVERS and isinstance(key, Key):
            return self._db(self._id == key).select(
                limitby=(0,1), orderby_on_limitby=False).first()
        elif key:
            return ogetattr(self, str(key))

    def __call__(self, key=DEFAULT, **kwargs):
        for_update = kwargs.get('_for_update',False)
        if '_for_update' in kwargs: del kwargs['_for_update']

        orderby = kwargs.get('_orderby',None)
        if '_orderby' in kwargs: del kwargs['_orderby']

        if not key is DEFAULT:
            if isinstance(key, Query):
                record = self._db(key).select(
                    limitby=(0,1), for_update=for_update,
                    orderby=orderby, orderby_on_limitby=False).first()
            elif not str(key).isdigit():
                record = None
            else:
                record = self._db(self._id == key).select(
                    limitby=(0,1), for_update=for_update,
                    orderby=orderby, orderby_on_limitby=False).first()
            if record:
                for k,v in kwargs.iteritems():
                    if record[k]!=v: return None
            return record
        elif kwargs:
            query = reduce(lambda a,b:a&b,
                           [self[k]==v for k,v in kwargs.iteritems()])
            return self._db(query).select(
                limitby=(0,1), for_update=for_update,
                orderby=orderby, orderby_on_limitby=False).first()
        else:
            return None

    def __setitem__(self, key, value):
        if isinstance(key, dict) and isinstance(value, dict):
            # option for keyed table
            if set(key.keys()) == set(self._primarykey):
                value = self._filter_fields(value)
                kv = {}
                kv.update(value)
                kv.update(key)
                if not self.insert(**kv):
                    query = self._build_query(key)
                    self._db(query).update(**self._filter_fields(value))
            else:
                raise SyntaxError(
                    'key must have all fields from primary key: %s' % \
                    (self._primarykey))
        elif str(key).isdigit():
            if key == 0:
                self.insert(**self._filter_fields(value))
            elif self._db(self._id == key)\
                    .update(**self._filter_fields(value)) is None:
                raise SyntaxError('No such record: %s' % key)
        else:
            if isinstance(key, dict):
                raise SyntaxError(
                    'value must be a dictionary: %s' % value)
            osetattr(self, str(key), value)

    __getattr__ = __getitem__

    def __setattr__(self, key, value):
        if key[:1]!='_' and key in self:
            raise SyntaxError('Object exists and cannot be redefined: %s' % key)
        osetattr(self,key,value)

    def __delitem__(self, key):
        if isinstance(key, dict):
            query = self._build_query(key)
            if not self._db(query).delete():
                raise SyntaxError('No such record: %s' % key)
        elif not str(key).isdigit() or \
                not self._db(self._id == key).delete():
            raise SyntaxError('No such record: %s' % key)

    def __contains__(self, key):
        return hasattr(self,key)

    has_key = __contains__

    def items(self):
        return self.__dict__.items()

    def __iter__(self):
        for fieldname in self.fields:
            yield self[fieldname]

    def __repr__(self):
        return '<Table %s (%s)>' % (self._tablename,','.join(self.fields()))

    def __str__(self):
        if self._ot is not None:
            if 'Oracle' in str(type(self._db._adapter)):
                # Oracle aliases tables with a space rather than AS
                return '%s %s' % (self._ot, self._tablename)
            return '%s AS %s' % (self._ot, self._tablename)
        return self._tablename

    def _drop(self, mode=''):
        return self._db._adapter._drop(self, mode)

    def drop(self, mode=''):
        return self._db._adapter.drop(self,mode)

    def _listify(self, fields, update=False):
        new_fields = {} # format: new_fields[name] = (field, value)

        # store all fields passed as input (keys must be table fields)
        for name in fields:
            if not name in self.fields:
                if name != 'id':
                    raise SyntaxError(
                        'Field %s does not belong to the table' % name)
            else:
                field = self[name]
                value = fields[name]
                if field.filter_in:
                    value = field.filter_in(value)
                new_fields[name] = (field,value)

        # check all fields that should be in the table but were not passed
        to_compute = []
        for ofield in self:
            name = ofield.name
            if not name in new_fields:
                # if the field is supposed to be computed, compute it later
                if ofield.compute:
                    to_compute.append((name,ofield))
                # on insert, fall back to the field default
                elif not update and not ofield.default is None:
                    value = ofield.default
                    fields[name] = value
                    new_fields[name] = (ofield,value)
                # on update, use the field's update value instead
                elif update and not ofield.update is None:
                    value = ofield.update
                    fields[name] = value
                    new_fields[name] = (ofield,value)
                # if the field is required and still missing, raise
                elif not update and ofield.required:
                    raise RuntimeError(
                        'Table: missing required field: %s' % name)

        # now deal with fields that are supposed to be computed
        if to_compute:
            row = Row(fields)
            for name,ofield in to_compute:
                try:
                    new_fields[name] = (ofield,ofield.compute(row))
                except (KeyError, AttributeError):
                    # fail silently unless the field is required
                    if ofield.required:
                        raise SyntaxError('unable to compute field: %s' % name)
        return new_fields.values()

    def _attempt_upload(self, fields):
        for field in self:
            if field.type=='upload' and field.name in fields:
                value = fields[field.name]
                if value and not isinstance(value,str):
                    if hasattr(value,'file') and hasattr(value,'filename'):
                        new_name = field.store(value.file,filename=value.filename)
                    elif hasattr(value,'read') and hasattr(value,'name'):
                        new_name = field.store(value,filename=value.name)
                    else:
                        raise RuntimeError("Unable to handle upload")
                    fields[field.name] = new_name

    def _defaults(self, fields):
        "If there are no fields/values specified, return table defaults"
        if not fields:
            fields = {}
            for field in self:
                if field.type != "id":
                    fields[field.name] = field.default
        return fields

    def update_or_insert(self, _key=DEFAULT, **values):
        if _key is DEFAULT:
            record = self(**values)
        elif isinstance(_key,dict):
            record = self(**_key)
        else:
            record = self(_key)
        if record:
            record.update_record(**values)
            newid = None
        else:
            newid = self.insert(**values)
        return newid
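
    # Usage sketch (illustrative): update the first record matching the key
    # (a Query, a dict, or an id), otherwise insert; returns the new id, or
    # None when an existing record was updated:
    #
    #     >>> db.person.update_or_insert(db.person.name == 'James',
    #     ...                            name='James')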

    def bulk_insert(self, items):
        """
        here items is a list of dictionaries
        """
        items = [self._listify(item) for item in items]
        if any(f(item) for item in items for f in self._before_insert):
            return 0
        ret = self._db._adapter.bulk_insert(self,items)
        ret and [[f(item,ret[k]) for k,item in enumerate(items)]
                 for f in self._after_insert]
        return ret
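
    # Usage sketch (illustrative): insert several rows in one call; adapters
    # without a native bulk facility fall back to one insert per item:
    #
    #     >>> db.person.bulk_insert([{'name': 'Alex'}, {'name': 'Bob'}])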

    def _truncate(self, mode=''):
        return self._db._adapter._truncate(self, mode)

    def truncate(self, mode=''):
        return self._db._adapter.truncate(self, mode)

    def import_from_csv_file(
        self,
        csvfile,
        id_map=None,
        null='<NULL>',
        unique='uuid',
        id_offset=None,
        *args, **kwargs
        ):
        """
        Import records from csv file.
        Column headers must have the same names as the table fields.
        Field 'id' is ignored.
        If column names read 'table.field' the 'table.' prefix is ignored.
        The 'unique' argument is a field which must be unique
        (typically a uuid field).
        The 'restore' argument defaults to False;
        if set True it will first remove old values in the table.
        If 'id_map' is set to None, ids will not be remapped:
        the import will keep the id numbers of the restored table,
        assuming there is a field of type id that is an integer
        in incrementing order.
        """

        delimiter = kwargs.get('delimiter', ',')
        quotechar = kwargs.get('quotechar', '"')
        quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
        restore = kwargs.get('restore', False)
        if restore:
            self._db[self].truncate()

        reader = csv.reader(csvfile, delimiter=delimiter,
                            quotechar=quotechar, quoting=quoting)
        colnames = None
        if isinstance(id_map, dict):
            if not self._tablename in id_map:
                id_map[self._tablename] = {}
            id_map_self = id_map[self._tablename]

        def fix(field, value, id_map, id_offset):
            list_reference_s='list:reference'
            if value == null:
                value = None
            elif field.type=='blob':
                value = base64.b64decode(value)
            elif field.type=='json':
                try:
                    json = serializers.json
                    value = json(value)
                except TypeError:
                    pass
            elif field.type=='double' or field.type=='float':
                if not value.strip():
                    value = None
                else:
                    value = float(value)
            elif field.type in ('integer','bigint'):
                if not value.strip():
                    value = None
                else:
                    value = long(value)
            elif field.type.startswith('list:string'):
                value = bar_decode_string(value)
            elif field.type.startswith(list_reference_s):
                ref_table = field.type[len(list_reference_s):].strip()
                if id_map is not None:
                    value = [id_map[ref_table][long(v)] \
                             for v in bar_decode_string(value)]
                else:
                    value = [v for v in bar_decode_string(value)]
            elif field.type.startswith('list:'):
                value = bar_decode_integer(value)
            elif id_map and field.type.startswith('reference'):
                try:
                    value = id_map[field.type[9:].strip()][long(value)]
                except KeyError:
                    pass
            elif id_offset and field.type.startswith('reference'):
                try:
                    value = id_offset[field.type[9:].strip()]+long(value)
                except KeyError:
                    pass
            return (field.name, value)

        def is_id(colname):
            if colname in self:
                return self[colname].type == 'id'
            else:
                return False

        first = True
        unique_idx = None
        for line in reader:
            if not line:
                break
            if not colnames:
                colnames = [x.split('.',1)[-1] for x in line][:len(line)]
                cols, cid = [], None
                for i,colname in enumerate(colnames):
                    if is_id(colname):
                        cid = i
                    else:
                        cols.append(i)
                    if colname == unique:
                        unique_idx = i
            else:
                items = [fix(self[colnames[i]], line[i], id_map, id_offset) \
                         for i in cols if colnames[i] in self.fields]

                if not id_map and cid is not None and id_offset is not None and not unique_idx:
                    csv_id = long(line[cid])
                    curr_id = self.insert(**dict(items))
                    if first:
                        first = False
                        # if the first curr_id is bigger than csv_id,
                        # we are not restoring but extending the db
                        # table with the csv table
                        if curr_id>csv_id:
                            id_offset[self._tablename] = curr_id-csv_id
                        else:
                            id_offset[self._tablename] = 0
                    # create new ids until we reach csv_id+offset
                    while curr_id<csv_id+id_offset[self._tablename]:
                        self._db(self._db[self][colnames[cid]] == curr_id).delete()
                        curr_id = self.insert(**dict(items))
                # check for a duplicate of the 'unique' value and,
                # if present, update instead of insert
                elif not unique_idx:
                    new_id = self.insert(**dict(items))
                else:
                    unique_value = line[unique_idx]
                    query = self._db[self][unique] == unique_value
                    record = self._db(query).select().first()
                    if record:
                        record.update_record(**dict(items))
                        new_id = record[self._id.name]
                    else:
                        new_id = self.insert(**dict(items))
                if id_map and cid is not None:
                    id_map_self[long(line[cid])] = new_id

    def as_dict(self, flat=False, sanitize=True, field_options=True):
        tablename = str(self)
        table_as_dict = dict(name=tablename, items={}, fields=[],
                             sequence_name=self._sequence_name,
                             trigger_name=self._trigger_name,
                             common_filter=self._common_filter,
                             format=self._format,
                             singular=self._singular, plural=self._plural)

        for field in self:
            if (field.readable or field.writable) or (not sanitize):
                table_as_dict["fields"].append(field.name)
                table_as_dict["items"][field.name] = \
                    field.as_dict(flat=flat, sanitize=sanitize,
                                  options=field_options)
        return table_as_dict

    def as_xml(self, sanitize=True, field_options=True):
        if not have_serializers:
            raise ImportError("No xml serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize,
                         field_options=field_options)
        return serializers.xml(d)

    def as_json(self, sanitize=True, field_options=True):
        if not have_serializers:
            raise ImportError("No json serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize,
                         field_options=field_options)
        return serializers.json(d)

    def as_yaml(self, sanitize=True, field_options=True):
        if not have_serializers:
            raise ImportError("No YAML serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize,
                         field_options=field_options)
        return serializers.yaml(d)

    def on(self, query):
        return Expression(self._db,self._db._adapter.ON,self,query)

def archive_record(qset, fs, archive_table, current_record):
    tablenames = qset.db._adapter.tables(qset.query)
    if len(tablenames)!=1: raise RuntimeError("cannot update join")
    table = qset.db[tablenames[0]]
    for row in qset.select():
        fields = archive_table._filter_fields(row)
        fields[current_record] = row.id
        archive_table.insert(**fields)
    return False

class Expression(object):

    def __init__(
        self,
        db,
        op,
        first=None,
        second=None,
        type=None,
        **optional_args
        ):

        self.db = db
        self.op = op
        self.first = first
        self.second = second
        self._table = getattr(first,'_table',None)
        if not type and first and hasattr(first,'type'):
            self.type = first.type
        else:
            self.type = type
        self.optional_args = optional_args

    def __getslice__(self, start, stop):
        db = self.db
        if start < 0:
            pos0 = '(%s - %d)' % (self.len(), abs(start) - 1)
        else:
            pos0 = start + 1

        if stop < 0:
            length = '(%s - %d - %s)' % (self.len(), abs(stop) - 1, pos0)
        elif stop == sys.maxint:
            length = self.len()
        else:
            length = '(%s - %s)' % (stop + 1, pos0)
        return Expression(db,db._adapter.SUBSTRING,
                          self, (pos0, length), self.type)

    def __getitem__(self, i):
        return self[i:i + 1]

    def __str__(self):
        return self.db._adapter.expand(self,self.type)

    def __or__(self, other):  # for use in sortby
        db = self.db
        return Expression(db,db._adapter.COMMA,self,other,self.type)

    def __sub__(self, other):
        db = self.db
        if self.type in ('integer','bigint'):
            result_type = 'integer'
        elif self.type in ['date','time','datetime','double','float']:
            result_type = 'double'
        elif self.type.startswith('decimal('):
            result_type = self.type
        else:
            raise SyntaxError("subtraction operation not supported for type")
        return Expression(db,db._adapter.SUB,self,other,result_type)

    def like(self, value, case_sensitive=False):
        db = self.db
        op = case_sensitive and db._adapter.LIKE or db._adapter.ILIKE
        return Query(db, op, self, value)

    def belongs(self, *value):
        """
        Accepts the following inputs:
            field.belongs(1,2)
            field.belongs((1,2))
            field.belongs(query)

        Does NOT accept:
            field.belongs(1)
        """
        db = self.db
        if len(value) == 1:
            value = value[0]
        if isinstance(value,Query):
            value = db(value)._select(value.first._table._id)
        return Query(db, db._adapter.BELONGS, self, value)
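
    # Usage sketch (illustrative; `person` table as in the module docstring).
    # A tuple becomes an IN (...) test and a Query becomes a nested select:
    #
    #     >>> db(db.person.id.belongs(1, 2)).select()
    #     >>> db(db.person.id.belongs(db.person.name == 'James')).select()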

    def startswith(self, value):
        db = self.db
        if not self.type in ('string', 'text', 'json'):
            raise SyntaxError("startswith used with incompatible field type")
        return Query(db, db._adapter.STARTSWITH, self, value)

    def endswith(self, value):
        db = self.db
        if not self.type in ('string', 'text', 'json'):
            raise SyntaxError("endswith used with incompatible field type")
        return Query(db, db._adapter.ENDSWITH, self, value)

    def contains(self, value, all=False, case_sensitive=False):
        """
        The case_sensitive parameter is only useful for PostgreSQL;
        for other RDBMSs it is ignored and contains is always
        case-insensitive. For MongoDB and GAE contains is always
        case-sensitive.
        """
        db = self.db
        if isinstance(value,(list, tuple)):
            subqueries = [self.contains(str(v).strip(),
                                        case_sensitive=case_sensitive)
                          for v in value if str(v).strip()]
            if not subqueries:
                return self.contains('')
            else:
                return reduce(all and AND or OR, subqueries)
        if not self.type in ('string', 'text', 'json') and \
                not self.type.startswith('list:'):
            raise SyntaxError("contains used with incompatible field type")
        return Query(db, db._adapter.CONTAINS, self, value,
                     case_sensitive=case_sensitive)
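
    # Usage sketch (illustrative): on string fields this compiles to a LIKE
    # '%value%' test; with a list, all=True ANDs the subqueries and
    # all=False ORs them:
    #
    #     >>> db(db.person.name.contains('am')).select()
    #     >>> db(db.person.name.contains(['Ja', 'im'], all=False)).select()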

    def st_asgeojson(self, precision=15, options=0, version=1):
        db = self.db
        return Expression(db, db._adapter.ST_ASGEOJSON, self,
                          dict(precision=precision, options=options,
                               version=version), 'string')

    def st_astext(self):
        db = self.db
        return Expression(db, db._adapter.ST_ASTEXT, self, type='string')

class SQLCustomType(object):
    """
    allows defining of custom SQL types

    Example::

        decimal = SQLCustomType(
            type='double',
            native='integer',
            encoder=(lambda x: int(float(x) * 100)),
            decoder=(lambda x: Decimal("0.00") + Decimal(str(float(x)/100)))
            )

        db.define_table(
            'example',
            Field('value', type=decimal)
            )

    :param type: the web2py type (default = 'string')
    :param native: the backend type
    :param encoder: how to encode the value to store it in the backend
    :param decoder: how to decode the value retrieved from the backend
    :param validator: what validators to use (default = None, will use the
        default validator for type)
    """

    def __init__(
        self,
        type='string',
        native=None,
        encoder=None,
        decoder=None,
        validator=None,
        _class=None,
        ):

        self.type = type
        self.native = native
        self.encoder = encoder or (lambda x: x)
        self.decoder = decoder or (lambda x: x)
        self.validator = validator
        self._class = _class or type

    def startswith(self, text=None):
        try:
            return self.type.startswith(text)
        except TypeError:
            return False

class FieldVirtual(object):
    def __init__(self, name, f=None, ftype='string',
                 label=None, table_name=None):
        # for backward compatibility: accept either (name, f) or just f
        (self.name, self.f) = (name, f) if f else ('unknown', name)
        self.type = ftype
        self.label = label or self.name.capitalize().replace('_',' ')
        self.tablename = table_name

    def __repr__(self):
        return '%s.%s' % (self.tablename, self.name)

class FieldMethod(object):
    def __init__(self, name, f=None, handler=None):
        # for backward compatibility: accept either (name, f) or just f
        (self.name, self.f) = (name, f) if f else ('unknown', name)
        self.handler = handler

def list_represent(x, r=None):
    return ', '.join(str(y) for y in x or [])

class Field(Expression):

    Virtual = FieldVirtual
    Method = FieldMethod
    Lazy = FieldMethod

    """
    an instance of this class represents a database field

    example::

        a = Field(name, 'string', length=32, default=None, required=False,
            requires=IS_NOT_EMPTY(), ondelete='CASCADE',
            notnull=False, unique=False,
            uploadfield=True,     # True means store on disk,
                                  # 'a_field_name' means store in this field
                                  # in db, False means file content will be
                                  # discarded
            widget=None, label=None, comment=None,
            writable=True, readable=True, update=None, authorize=None,
            autodelete=False, represent=None, uploadfolder=None,
            uploadseparate=False, # False - old behavior;
                                  # True - put the uploaded file in
                                  # <uploaddir>/<tablename>.<fieldname>/uuid_key[:2],
                                  # i.e. separate directories keyed by the
                                  # first 2 characters of uuid_key
            uploadfs=None)        # a pyfilesystem where to store uploads

    to be used as argument of DAL.define_table

    allowed field types:
    string, boolean, integer, double, text, blob,
    date, time, datetime, upload, password

    strings have a default length of Adapter.maxcharlength
    (512, or 255 for mysql);
    fields should have a default or they will be required in SQLFORMs;
    the requires argument is used to validate the field input in SQLFORMs

    """
    def __init__(
        self,
        fieldname,
        type='string',
        length=None,
        default=DEFAULT,
        required=False,
        requires=DEFAULT,
        ondelete='CASCADE',
        notnull=False,
        unique=False,
        uploadfield=True,
        widget=None,
        label=None,
        comment=None,
        writable=True,
        readable=True,
        update=None,
        authorize=None,
        autodelete=False,
        represent=None,
        uploadfolder=None,
        uploadseparate=False,
        uploadfs=None,
        compute=None,
        custom_store=None,
        custom_retrieve=None,
        custom_retrieve_file_properties=None,
        custom_delete=None,
        filter_in=None,
        filter_out=None,
        custom_qualifier=None,
        map_none=None,
        ):
        self._db = self.db = None
        self.op = None
        self.first = None
        self.second = None
        self.name = fieldname = cleanup(fieldname)
        if not isinstance(fieldname,str) or hasattr(Table,fieldname) or \
                fieldname[0] == '_' or REGEX_PYTHON_KEYWORDS.match(fieldname):
            raise SyntaxError('Field: invalid field name: %s' % fieldname)
        self.type = type if not isinstance(type, (Table,Field)) \
            else 'reference %s' % type
        self.length = length if not length is None \
            else DEFAULTLENGTH.get(self.type,512)
        self.default = default if default!=DEFAULT else (update or None)
        self.required = required
        self.ondelete = ondelete.upper()
        self.notnull = notnull
        self.unique = unique
        self.uploadfield = uploadfield
        self.uploadfolder = uploadfolder
        self.uploadseparate = uploadseparate
        self.uploadfs = uploadfs
        self.widget = widget
        self.comment = comment
        self.writable = writable
        self.readable = readable
        self.update = update
        self.authorize = authorize
        self.autodelete = autodelete
        self.represent = list_represent if \
            represent==None and type in ('list:integer','list:string') \
            else represent
        self.compute = compute
        self.isattachment = True
        self.custom_store = custom_store
        self.custom_retrieve = custom_retrieve
        self.custom_retrieve_file_properties = custom_retrieve_file_properties
        self.custom_delete = custom_delete
        self.filter_in = filter_in
        self.filter_out = filter_out
        self.custom_qualifier = custom_qualifier
        self.label = label if label!=None \
            else fieldname.replace('_',' ').title()
        self.requires = requires if requires!=None else []
        self.map_none = map_none

    def set_attributes(self, *args, **attributes):
        self.__dict__.update(*args,**attributes)

    def clone(self, point_self_references_to=False, **args):
        field = copy.copy(self)
        if point_self_references_to and \
                field.type == 'reference %s' % field._tablename:
            field.type = 'reference %s' % point_self_references_to
        field.__dict__.update(args)
        return field

    def store(self, file, filename=None, path=None):
        if self.custom_store:
            return self.custom_store(file,filename,path)
        if isinstance(file, cgi.FieldStorage):
            filename = filename or file.filename
            file = file.file
        elif not filename:
            filename = file.name
        filename = os.path.basename(filename.replace('/', os.sep)\
                                        .replace('\\', os.sep))
        m = REGEX_STORE_PATTERN.search(filename)
        extension = m and m.group('e') or 'txt'
        uuid_key = web2py_uuid().replace('-', '')[-16:]
        encoded_filename = base64.b16encode(filename).lower()
        newfilename = '%s.%s.%s.%s' % \
            (self._tablename, self.name, uuid_key, encoded_filename)
        newfilename = newfilename[:(self.length - 1 - len(extension))] \
            + '.' + extension
        self_uploadfield = self.uploadfield
        if isinstance(self_uploadfield,Field):
            blob_uploadfield_name = self_uploadfield.uploadfield
            keys={self_uploadfield.name: newfilename,
                  blob_uploadfield_name: file.read()}
            self_uploadfield.table.insert(**keys)
        elif self_uploadfield == True:
            if path:
                pass
            elif self.uploadfolder:
                path = self.uploadfolder
            elif self.db._adapter.folder:
                path = pjoin(self.db._adapter.folder, '..', 'uploads')
            else:
                raise RuntimeError(
                    "you must specify a Field(...,uploadfolder=...)")
            if self.uploadseparate:
                if self.uploadfs:
                    raise RuntimeError("not supported")
                path = pjoin(path,"%s.%s" % (self._tablename, self.name),
                             uuid_key[:2])
            if not exists(path):
                os.makedirs(path)
            pathfilename = pjoin(path, newfilename)
            if self.uploadfs:
                dest_file = self.uploadfs.open(newfilename, 'wb')
            else:
                dest_file = open(pathfilename, 'wb')
            try:
                shutil.copyfileobj(file, dest_file)
            except IOError:
                raise IOError(
                    'Unable to store file "%s" because invalid permissions, readonly file system, or filename too long' % pathfilename)
            dest_file.close()
        return newfilename
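
    # Usage sketch (illustrative; `avatar` is a hypothetical upload field on
    # the `person` table). store() renames the file to a safe, unique name,
    # writes it to the upload folder (or blob field / pyfilesystem), and
    # returns the new name, which is what gets saved in the record:
    #
    #     >>> stream = open('photo.png', 'rb')
    #     >>> db.person.insert(name='James',
    #     ...     avatar=db.person.avatar.store(stream, 'photo.png'))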

    def retrieve(self, name, path=None, nameonly=False):
        """
        if nameonly==True return (filename, fullfilename) instead of
        (filename, stream)
        """
        self_uploadfield = self.uploadfield
        if self.custom_retrieve:
            return self.custom_retrieve(name, path)
        import http
        if self.authorize or isinstance(self_uploadfield, str):
            row = self.db(self == name).select().first()
            if not row:
                raise http.HTTP(404)
            if self.authorize and not self.authorize(row):
                raise http.HTTP(403)
        m = REGEX_UPLOAD_PATTERN.match(name)
        if not m or not self.isattachment:
            raise TypeError('Can\'t retrieve %s' % name)
        file_properties = self.retrieve_file_properties(name,path)
        filename = file_properties['filename']
        if isinstance(self_uploadfield, str):  # file is in a db field
            stream = StringIO.StringIO(row[self_uploadfield] or '')
        elif isinstance(self_uploadfield,Field):
            blob_uploadfield_name = self_uploadfield.uploadfield
            query = self_uploadfield == name
            data = self_uploadfield.table(query)[blob_uploadfield_name]
            stream = StringIO.StringIO(data)
        elif self.uploadfs:
            # file is on pyfilesystem
            stream = self.uploadfs.open(name, 'rb')
        else:
            # file is on the regular filesystem
            fullname = pjoin(file_properties['path'],name)
            if nameonly:
                return (filename, fullname)
            stream = open(fullname,'rb')
        return (filename, stream)

    def retrieve_file_properties(self, name, path=None):
        self_uploadfield = self.uploadfield
        if self.custom_retrieve_file_properties:
            return self.custom_retrieve_file_properties(name, path)
        try:
            m = REGEX_UPLOAD_PATTERN.match(name)
            if not m or not self.isattachment:
                raise TypeError('Can\'t retrieve %s file properties' % name)
            filename = base64.b16decode(m.group('name'), True)
            filename = REGEX_CLEANUP_FN.sub('_', filename)
        except (TypeError, AttributeError):
            filename = name
        if isinstance(self_uploadfield, str):  # file is in a db field
            return dict(path=None,filename=filename)
        elif isinstance(self_uploadfield,Field):
            return dict(path=None,filename=filename)
        else:
            # file is on the filesystem
            if path:
                pass
            elif self.uploadfolder:
                path = self.uploadfolder
            else:
                path = pjoin(self.db._adapter.folder, '..', 'uploads')
            if self.uploadseparate:
                t = m.group('table')
                f = m.group('field')
                u = m.group('uuidkey')
                path = pjoin(path,"%s.%s" % (t,f),u[:2])
            return dict(path=path,filename=filename)

    def count(self, distinct=None):
        return Expression(self.db, self.db._adapter.COUNT, self,
                          distinct, 'integer')

    def as_dict(self, flat=False, sanitize=True, options=True):

        attrs = ('type', 'length', 'default', 'required',
                 'ondelete', 'notnull', 'unique', 'uploadfield',
                 'widget', 'label', 'comment', 'writable', 'readable',
                 'update', 'authorize', 'autodelete', 'represent',
                 'uploadfolder', 'uploadseparate', 'uploadfs',
                 'compute', 'custom_store', 'custom_retrieve',
                 'custom_retrieve_file_properties', 'custom_delete',
                 'filter_in', 'filter_out', 'custom_qualifier',
                 'map_none', 'name')

        SERIALIZABLE_TYPES = (int, long, basestring, dict, list,
                              float, tuple, bool, type(None))

        def flatten(obj):
            if flat:
                if isinstance(obj, flatten.__class__):
                    return str(type(obj))
                elif isinstance(obj, type):
                    try:
                        return str(obj).split("'")[1]
                    except IndexError:
                        return str(obj)
                elif not isinstance(obj, SERIALIZABLE_TYPES):
                    return str(obj)
                elif isinstance(obj, dict):
                    newobj = dict()
                    for k, v in obj.items():
                        newobj[k] = flatten(v)
                    return newobj
                elif isinstance(obj, (list, tuple, set)):
                    return [flatten(v) for v in obj]
                else:
                    return obj
            elif isinstance(obj, (dict, set)):
                return obj.copy()
            else: return obj

        def filter_requires(t, r, options=True):
            if sanitize and any([keyword in str(t).upper() for
                                 keyword in ("CRYPT", "IS_STRONG")]):
                return None

            if not isinstance(r, dict):
                if options and hasattr(r, "options"):
                    if callable(r.options):
                        r.options()
                newr = r.__dict__.copy()
            else:
                newr = r.copy()

            # remove options if not requested
            if not options and newr.has_key("labels"):
                [newr.update({key:None}) for key in
                 ("labels", "theset") if (key in newr)]

            for k, v in newr.items():
                if k == "other":
                    if isinstance(v, dict):
                        otype, other = v.popitem()
                    else:
                        otype = flatten(type(v))
                        other = v
                    newr[k] = {otype: filter_requires(otype, other,
                                                      options=options)}
                else:
                    newr[k] = flatten(v)
            return newr

        if isinstance(self.requires, (tuple, list, set)):
            requires = dict([(flatten(type(r)),
                              filter_requires(type(r), r,
                                              options=options)) for
                             r in self.requires])
        else:
            requires = {flatten(type(self.requires)):
                        filter_requires(type(self.requires),
                                        self.requires, options=options)}

        d = dict(colname="%s.%s" % (self.tablename, self.name),
                 requires=requires)
        d.update([(attr, flatten(getattr(self, attr))) for attr in attrs])
        return d

    def as_xml(self, sanitize=True, options=True):
        if not have_serializers:
            raise ImportError("No xml serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize, options=options)
        return serializers.xml(d)

    def as_json(self, sanitize=True, options=True):
        if not have_serializers:
            raise ImportError("No json serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize, options=options)
        return serializers.json(d)

    def as_yaml(self, sanitize=True, options=True):
        if not have_serializers:
            raise ImportError("No YAML serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize, options=options)
        return serializers.yaml(d)

    def __str__(self):
        try:
            return '%s.%s' % (self.tablename, self.name)
        except:
            return '<no table>.%s' % self.name

class Query(object):

    """
    a query object necessary to define a set.
    it can be stored or can be passed to DAL.__call__() to obtain a Set

    Example::

        query = db.users.name=='Max'
        set = db(query)
        records = set.select()

    """

    def __init__(
        self,
        db,
        op,
        first=None,
        second=None,
        ignore_common_filters=False,
        **optional_args
        ):
        self.db = self._db = db
        self.op = op
        self.first = first
        self.second = second
        self.ignore_common_filters = ignore_common_filters
        self.optional_args = optional_args

    def __str__(self):
        return self.db._adapter.expand(self)

    def __and__(self, other):
        return Query(self.db,self.db._adapter.AND,self,other)

    __rand__ = __and__

    def __or__(self, other):
        return Query(self.db,self.db._adapter.OR,self,other)

    __ror__ = __or__

    def __invert__(self):
        if self.op==self.db._adapter.NOT:
            return self.first
        return Query(self.db,self.db._adapter.NOT,self)

    def __eq__(self, other):
        return repr(self) == repr(other)

    def __ne__(self, other):
        return not (self == other)

    def case(self, t=1, f=0):
        return self.db._adapter.CASE(self,t,f)

    def as_dict(self, flat=False, sanitize=True):
        """Experimental stuff

        This allows returning a plain dictionary with the basic
        query representation. Can be used with json/xml services
        for client-side db I/O

        Example:
        >>> q = db.auth_user.id != 0
        >>> q.as_dict(flat=True)
        {"op": "NE", "first":{"tablename": "auth_user",
                              "fieldname": "id"},
         "second":0}
        """

        SERIALIZABLE_TYPES = (tuple, dict, list, int, long, float,
                              basestring, type(None), bool)

        def loop(d):
            newd = dict()
            for k, v in d.items():
                if k in ("first", "second"):
                    if isinstance(v, self.__class__):
                        newd[k] = loop(v.__dict__)
                    elif isinstance(v, Field):
                        newd[k] = {"tablename": v._tablename,
                                   "fieldname": v.name}
                    elif isinstance(v, Expression):
                        newd[k] = loop(v.__dict__)
                    elif isinstance(v, SERIALIZABLE_TYPES):
                        newd[k] = v
                    elif isinstance(v, (datetime.date,
                                        datetime.time,
                                        datetime.datetime)):
                        newd[k] = unicode(v)
                elif k == "op":
                    if callable(v):
                        newd[k] = v.__name__
                    elif isinstance(v, basestring):
                        newd[k] = v
                    else: pass # not a supported operator
                elif isinstance(v, SERIALIZABLE_TYPES):
                    if isinstance(v, dict):
                        newd[k] = loop(v)
                    else: newd[k] = v
            return newd

        if flat:
            return loop(self.__dict__)
        else: return self.__dict__

    def as_xml(self, sanitize=True):
        if not have_serializers:
            raise ImportError("No xml serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize)
        return serializers.xml(d)

    def as_json(self, sanitize=True):
        if not have_serializers:
            raise ImportError("No json serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize)
        return serializers.json(d)

def xorify(orderby):
    if not orderby:
        return None
    orderby2 = orderby[0]
    for item in orderby[1:]:
        orderby2 = orderby2 | item
    return orderby2

def use_common_filters(query):
    return (query and hasattr(query,'ignore_common_filters') and \
                not query.ignore_common_filters)

class Set(object):

    """
    a Set represents a set of records in the database,
    the records are identified by the query=Query(...) object.
    normally the Set is generated by DAL.__call__(Query(...))

    given a set, for example
        set = db(db.users.name=='Max')
    you can:
        set.update(db.users.name='Massimo')
        set.delete() # all elements in the set
        set.select(orderby=db.users.id, groupby=db.users.name, limitby=(0,10))
    and take subsets:
        subset = set(db.users.id<5)
    """

    def __init__(self, db, query, ignore_common_filters=None):
        self.db = db
        self._db = db
        self.dquery = None

        # if query is a dict, parse it into a Query object
        if isinstance(query, dict):
            query = self.parse(query)

        if not ignore_common_filters is None and \
                use_common_filters(query) == ignore_common_filters:
            query = copy.copy(query)
            query.ignore_common_filters = ignore_common_filters
        self.query = query

    def __call__(self, query, ignore_common_filters=False):
        if query is None:
            return self
        elif isinstance(query,Table):
            query = self.db._adapter.id_query(query)
        elif isinstance(query,str):
            query = Expression(self.db,query)
        elif isinstance(query,Field):
            query = query!=None
        if self.query:
            return Set(self.db, self.query & query,
                       ignore_common_filters=ignore_common_filters)
        else:
            return Set(self.db, query,
                       ignore_common_filters=ignore_common_filters)

    def _count(self, distinct=None):
        return self.db._adapter._count(self.query,distinct)

    def _select(self, *fields, **attributes):
        adapter = self.db._adapter
        tablenames = adapter.tables(self.query,
                                    attributes.get('join',None),
                                    attributes.get('left',None),
                                    attributes.get('orderby',None),
                                    attributes.get('groupby',None))
        fields = adapter.expand_all(fields, tablenames)
        return adapter._select(self.query,fields,attributes)

    def _delete(self):
        db = self.db
        tablename = db._adapter.get_table(self.query)
        return db._adapter._delete(tablename,self.query)

    def _update(self, **update_fields):
        db = self.db
        tablename = db._adapter.get_table(self.query)
        fields = db[tablename]._listify(update_fields,update=True)
        return db._adapter._update(tablename,self.query,fields)

    def as_dict(self, flat=False, sanitize=True):
        if flat:
            uid = dbname = uri = None
            codec = self.db._db_codec
            if not sanitize:
                uri, dbname, uid = (self.db._dbname, str(self.db),
                                    self.db._db_uid)
            d = {"query": self.query.as_dict(flat=flat)}
            d["db"] = {"uid": uid, "codec": codec,
                       "name": dbname, "uri": uri}
            return d
        else: return self.__dict__

    def as_xml(self, sanitize=True):
        if not have_serializers:
            raise ImportError("No xml serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize)
        return serializers.xml(d)

    def as_json(self, sanitize=True):
        if not have_serializers:
            raise ImportError("No json serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize)
        return serializers.json(d)

    def parse(self, dquery):
        "Experimental: Turn a dictionary into a Query object"
        self.dquery = dquery
        return self.build(self.dquery)

    def build(self, d):
        "Experimental: see .parse()"
        op, first, second = (d["op"], d["first"],
                             d.get("second", None))
        left = right = built = None

        if op in ("AND", "OR"):
            if not (type(first), type(second)) == (dict, dict):
                raise SyntaxError("Invalid AND/OR query")
            if op == "AND":
                built = self.build(first) & self.build(second)
            else: built = self.build(first) | self.build(second)

        elif op == "NOT":
            if first is None:
                raise SyntaxError("Invalid NOT query")
            built = ~self.build(first)
        else:
            # normal operation (GT, EQ, LT, ...)
            for k, v in {"left": first, "right": second}.items():
                if isinstance(v, dict) and v.get("op"):
                    v = self.build(v)
                if isinstance(v, dict) and ("tablename" in v):
                    v = self.db[v["tablename"]][v["fieldname"]]
                if k == "left": left = v
                else: right = v

            if hasattr(self.db._adapter, op):
                opm = getattr(self.db._adapter, op)

            if op == "EQ": built = left == right
            elif op == "NE": built = left != right
            elif op == "GT": built = left > right
            elif op == "GE": built = left >= right
            elif op == "LT": built = left < right
            elif op == "LE": built = left <= right
            elif op in ("JOIN", "LEFT_JOIN", "RANDOM", "ALLOW_NULL"):
                built = Expression(self.db, opm)
            elif op in ("LOWER", "UPPER", "EPOCH", "PRIMARY_KEY",
                        "COALESCE_ZERO", "RAW", "INVERT"):
                built = Expression(self.db, opm, left)
            elif op in ("COUNT", "EXTRACT", "AGGREGATE", "SUBSTRING",
                        "REGEXP", "LIKE", "ILIKE", "STARTSWITH",
                        "ENDSWITH", "ADD", "SUB", "MUL", "DIV",
                        "MOD", "AS", "ON", "COMMA", "NOT_NULL",
                        "COALESCE", "CONTAINS", "BELONGS"):
                built = Expression(self.db, opm, left, right)
            # expression as string
            elif not (left or right): built = Expression(self.db, op)
            else:
                raise SyntaxError("Operator not supported: %s" % op)

        return built
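
    # Round-trip sketch (illustrative): a Query flattened with as_dict() can
    # be rebuilt by passing the dictionary to db(...), since Set.__init__
    # routes dicts through parse()/build():
    #
    #     >>> d = (db.person.name == 'James').as_dict(flat=True)
    #     >>> db(d).select()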

    def isempty(self):
        return not self.select(limitby=(0,1), orderby_on_limitby=False)

    def count(self, distinct=None, cache=None):
        db = self.db
        if cache:
            cache_model, time_expire = cache
            sql = self._count(distinct=distinct)
            key = db._uri + '/' + sql
            if len(key)>200: key = hashlib_md5(key).hexdigest()
            return cache_model(
                key,
                (lambda self=self,distinct=distinct: \
                    db._adapter.count(self.query,distinct)),
                time_expire)
        return db._adapter.count(self.query,distinct)

    def select(self, *fields, **attributes):
        adapter = self.db._adapter
        tablenames = adapter.tables(self.query,
                                    attributes.get('join',None),
                                    attributes.get('left',None),
                                    attributes.get('orderby',None),
                                    attributes.get('groupby',None))
        fields = adapter.expand_all(fields, tablenames)
        return adapter.select(self.query,fields,attributes)

    def delete(self):
        db = self.db
        tablename = db._adapter.get_table(self.query)
        table = db[tablename]
        if any(f(self) for f in table._before_delete): return 0
        ret = db._adapter.delete(tablename,self.query)
        ret and [f(self) for f in table._after_delete]
        return ret

    def update(self, **update_fields):
        db = self.db
        tablename = db._adapter.get_table(self.query)
        table = db[tablename]
        table._attempt_upload(update_fields)
        if any(f(self,update_fields) for f in table._before_update):
            return 0
        fields = table._listify(update_fields,update=True)
        if not fields:
            raise SyntaxError("No fields to update")
        ret = db._adapter.update(tablename,self.query,fields)
        ret and [f(self,update_fields) for f in table._after_update]
        return ret

    def update_naive(self, **update_fields):
        """
        same as update but does not call table._before_update and _after_update
        """
        tablename = self.db._adapter.get_table(self.query)
        table = self.db[tablename]
        fields = table._listify(update_fields,update=True)
        if not fields: raise SyntaxError("No fields to update")
        ret = self.db._adapter.update(tablename,self.query,fields)
        return ret

    def validate_and_update(self, **update_fields):
        tablename = self.db._adapter.get_table(self.query)
        response = Row()
        response.errors = Row()
        new_fields = copy.copy(update_fields)
        for key,value in update_fields.iteritems():
            value,error = self.db[tablename][key].validate(value)
            if error:
                response.errors[key] = error
            else:
                new_fields[key] = value
        table = self.db[tablename]
        if response.errors:
            response.updated = None
        else:
            if not any(f(self,new_fields) for f in table._before_update):
                fields = table._listify(new_fields,update=True)
                if not fields: raise SyntaxError("No fields to update")
                ret = self.db._adapter.update(tablename,self.query,fields)
                ret and [f(self,new_fields) for f in table._after_update]
            else:
                ret = 0
            response.updated = ret
        return response

    def delete_uploaded_files(self, upload_fields=None):
        table = self.db[self.db._adapter.tables(self.query)[0]]

        if upload_fields:
            fields = upload_fields.keys()
        else:
            fields = table.fields
        fields = [f for f in fields if table[f].type == 'upload'
                  and table[f].uploadfield == True
                  and table[f].autodelete]
        if not fields:
            return False
        for record in self.select(*[table[f] for f in fields]):
            for fieldname in fields:
                field = table[fieldname]
                oldname = record.get(fieldname, None)
                if not oldname:
                    continue
                if upload_fields and oldname == upload_fields[fieldname]:
                    continue
                if field.custom_delete:
                    field.custom_delete(oldname)
                else:
                    uploadfolder = field.uploadfolder
                    if not uploadfolder:
                        uploadfolder = pjoin(
                            self.db._adapter.folder, '..', 'uploads')
                    if field.uploadseparate:
                        items = oldname.split('.')
                        uploadfolder = pjoin(
                            uploadfolder,
                            "%s.%s" % (items[0], items[1]),
                            items[2][:2])
                    oldpath = pjoin(uploadfolder, oldname)
                    if exists(oldpath):
                        os.unlink(oldpath)
        return False

class RecordUpdater(object):
    def __init__(self, colset, table, id):
        self.colset, self.db, self.tablename, self.id = \
            colset, table._db, table._tablename, id

    def __call__(self, **fields):
        colset, db, tablename, id = (self.colset, self.db,
                                     self.tablename, self.id)
        table = db[tablename]
        newfields = fields or dict(colset)
        for fieldname in newfields.keys():
            if not fieldname in table.fields or table[fieldname].type=='id':
                del newfields[fieldname]
        table._db(table._id==id,ignore_common_filters=True).update(**newfields)
        colset.update(newfields)
        return colset

class RecordDeleter(object):
    def __init__(self, table, id):
        self.db, self.tablename, self.id = table._db, table._tablename, id

    def __call__(self):
        return self.db(self.db[self.tablename]._id==self.id).delete()

class LazySet(object):
    def __init__(self, field, id):
        self.db, self.tablename, self.fieldname, self.id = \
            field.db, field._tablename, field.name, id

    def _getset(self):
        query = self.db[self.tablename][self.fieldname]==self.id
        return Set(self.db, query)

    def __call__(self, query, ignore_common_filters=False):
        return self._getset()(query, ignore_common_filters)

    def _count(self, distinct=None):
        return self._getset()._count(distinct)

    def _select(self, *fields, **attributes):
        return self._getset()._select(*fields,**attributes)

    def _update(self, **update_fields):
        return self._getset()._update(**update_fields)

    def count(self, distinct=None, cache=None):
        return self._getset().count(distinct,cache)

    def select(self, *fields, **attributes):
        return self._getset().select(*fields,**attributes)

    def update(self, **update_fields):
        return self._getset().update(**update_fields)

class VirtualCommand(object):
    def __init__(self, method, row):
        self.method=method
        self.row=row
    def __call__(self, *args, **kwargs):
        return self.method(self.row,*args,**kwargs)

def lazy_virtualfield(f):
    f.__lazy__ = True
    return f

class Rows(object):

    """
    A wrapper for the return value of a select. It basically represents a table.
    It has an iterator and each row is represented as a dictionary.
    """

    def __init__(
        self,
        db=None,
        records=[],
        colnames=[],
        compact=True,
        rawrows=None
        ):
        self.db = db
        self.records = records
        self.colnames = colnames
        self.compact = compact
        self.response = rawrows

    def __repr__(self):
        return '<Rows (%s)>' % len(self.records)

    def setvirtualfields(self, **keyed_virtualfields):
        """
        db.define_table('x',Field('number','integer'))
        if db(db.x).isempty(): [db.x.insert(number=i) for i in range(10)]

        from gluon.dal import lazy_virtualfield

        class MyVirtualFields(object):
            # normal virtual field (backward compatible, discouraged)
            def normal_shift(self): return self.x.number+1
            # lazy virtual field (because of @lazy_virtualfield)
            @lazy_virtualfield
            def lazy_shift(instance,row,delta=4): return row.x.number+delta
        db.x.virtualfields.append(MyVirtualFields())

        for row in db(db.x).select():
            print row.number, row.normal_shift, row.lazy_shift(delta=7)
        """
        if not keyed_virtualfields:
            return self
        for row in self.records:
            for (tablename,virtualfields) in keyed_virtualfields.iteritems():
                attributes = dir(virtualfields)
                if not tablename in row:
                    box = row[tablename] = Row()
                else:
                    box = row[tablename]
                updated = False
                for attribute in attributes:
                    if attribute[0] != '_':
                        method = getattr(virtualfields,attribute)
                        if hasattr(method,'__lazy__'):
                            box[attribute]=VirtualCommand(method,row)
                        elif type(method)==types.MethodType:
                            if not updated:
                                virtualfields.__dict__.update(row)
                                updated = True
                            box[attribute]=method()
        return self

    def __and__(self,other):
        if self.colnames!=other.colnames:
            raise Exception('Cannot & incompatible Rows objects')
        records = self.records+other.records
        return Rows(self.db,records,self.colnames)

    def __or__(self,other):
        if self.colnames!=other.colnames:
            raise Exception('Cannot | incompatible Rows objects')
        records = self.records
        records += [record for record in other.records \
                    if not record in records]
        return Rows(self.db,records,self.colnames)

    def __nonzero__(self):
        if len(self.records):
            return 1
        return 0

    def __len__(self):
        return len(self.records)

    def __getslice__(self, a, b):
        return Rows(self.db,self.records[a:b],self.colnames)

    def __getitem__(self, i):
        row = self.records[i]
        keys = row.keys()
        if self.compact and len(keys) == 1 and keys[0] != '_extra':
            return row[keys[0]]
        return row
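    # Hedged indexing sketch (comment only): with compact=True (the default)
    # and a single-table select, rows[i] unwraps the inner Row, so
    # rows[0].name works instead of rows[0].person.name ('person' is an
    # illustrative table name).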

    def __iter__(self):
        """
        iterator over records
        """
        for i in xrange(len(self)):
            yield self[i]

10238 """
10239 serializes the table into a csv file
10240 """
10241
10242 s = StringIO.StringIO()
10243 self.export_to_csv_file(s)
10244 return s.getvalue()
10245
    def first(self):
        if not self.records:
            return None
        return self[0]

    def last(self):
        if not self.records:
            return None
        return self[-1]

    def find(self,f,limitby=None):
        """
        returns a new Rows object, a subset of the original object,
        filtered by the function f
        """
        if not self:
            return Rows(self.db, [], self.colnames)
        records = []
        if limitby:
            a,b = limitby
        else:
            a,b = 0,len(self)
        k = 0
        for row in self:
            if f(row):
                if a<=k: records.append(row)
                k += 1
                if k==b: break
        return Rows(self.db, records, self.colnames)
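    # Hedged usage sketch (comment only; 'person' is an illustrative table):
    #
    #     rows = db(db.person).select()
    #     js = rows.find(lambda row: row.name.startswith('J'), limitby=(0, 10))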

    def exclude(self, f):
        """
        removes elements from the calling Rows object, filtered by the
        function f, and returns a new Rows object containing the removed
        elements
        """
        if not self.records:
            return Rows(self.db, [], self.colnames)
        removed = []
        i=0
        while i<len(self):
            row = self[i]
            if f(row):
                removed.append(self.records[i])
                del self.records[i]
            else:
                i += 1
        return Rows(self.db, removed, self.colnames)
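    # Hedged usage sketch (comment only): unlike find, exclude mutates the
    # calling Rows object and returns what was removed ('age' is an
    # illustrative field):
    #
    #     minors = rows.exclude(lambda row: row.age < 18)  # rows keeps adults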

    def sort(self, f, reverse=False):
        """
        returns a new Rows object with the records sorted by the function f;
        the original object is not sorted in place
        """
        rows = Rows(self.db,[],self.colnames,compact=False)
        rows.records = sorted(self,key=f,reverse=reverse)
        return rows
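    # Hedged usage sketch (comment only): sort by a computed key, optionally
    # descending; 'name' and 'birth' are illustrative fields:
    #
    #     by_name = rows.sort(lambda row: row.name)
    #     newest = rows.sort(lambda row: row.birth, reverse=True)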

    def group_by_value(self, field):
        """
        regroups the rows by one of the fields
        """
        if not self.records:
            return {}
        key = str(field)
        grouped_row_group = dict()

        for row in self:
            value = row[key]
            if not value in grouped_row_group:
                grouped_row_group[value] = [row]
            else:
                grouped_row_group[value].append(row)
        return grouped_row_group
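    # Hedged usage sketch (comment only): group already-selected rows in
    # memory; 'person' is an illustrative table:
    #
    #     groups = db(db.person).select().group_by_value(db.person.name)
    #     # e.g. {'Jim': [<Row ...>, ...], 'Max': [<Row ...>]}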

    def as_list(self,
                compact=True,
                storage_to_dict=True,
                datetime_to_str=True,
                custom_types=None):
        """
        returns the data as a list or dictionary.
        :param storage_to_dict: when True returns a dict, otherwise a list (default True)
        :param datetime_to_str: convert datetime fields to strings (default True)
        """
        (oc, self.compact) = (self.compact, compact)
        if storage_to_dict:
            items = [item.as_dict(datetime_to_str, custom_types) for item in self]
        else:
            items = [item for item in self]
        self.compact = oc  # restore the original compact setting
        return items

    def as_dict(self,
                key='id',
                compact=True,
                storage_to_dict=True,
                datetime_to_str=True,
                custom_types=None):
        """
        returns the data as a dictionary of dictionaries (storage_to_dict=True)
        or records (False)

        :param key: the name of the field to be used as dict key, normally the id
        :param compact: passed through to as_list (default True)
        :param storage_to_dict: when True returns a dict, otherwise a list (default True)
        :param datetime_to_str: convert datetime fields to strings (default True)
        """

        # when the rows span multiple tables a plain field name is ambiguous,
        # so fall back to a sequential integer key
        multi = False
        f = self.first()
        if f and isinstance(key, basestring):
            multi = any([isinstance(v, f.__class__) for v in f.values()])
            if (not "." in key) and multi:

                def new_key():
                    i = 0
                    while True:
                        yield i
                        i += 1
                key_generator = new_key()
                key = lambda r: key_generator.next()

        rows = self.as_list(compact, storage_to_dict, datetime_to_str, custom_types)
        if isinstance(key,str) and key.count('.')==1:
            (table, field) = key.split('.')
            return dict([(r[table][field],r) for r in rows])
        elif isinstance(key,str):
            return dict([(r[key],r) for r in rows])
        else:
            return dict([(key(r),r) for r in rows])
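    # Hedged usage sketch (comment only; 'person' is an illustrative table):
    #
    #     d = db(db.person).select().as_dict()    # {1: {'id': 1, ...}, ...}
    #     d = rows.as_dict(key='person.name')     # keyed by a table.field value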

    def export_to_csv_file(self, ofile, null='<NULL>', *args, **kwargs):
        """
        export data to csv, the first line contains the column names

        :param ofile: where the csv must be exported to
        :param null: how null values must be represented (default '<NULL>')
        :param delimiter: delimiter to separate values (default ',')
        :param quotechar: character to use to quote string values (default '"')
        :param quoting: quote system, use csv.QUOTE_*** (default csv.QUOTE_MINIMAL)
        :param represent: use the fields' .represent value (default False)
        :param colnames: list of column names to use (default self.colnames)

        These arguments only apply when exporting a Rows object;
        do not pass them to db.export_to_csv_file()
        """
        delimiter = kwargs.get('delimiter', ',')
        quotechar = kwargs.get('quotechar', '"')
        quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
        represent = kwargs.get('represent', False)
        writer = csv.writer(ofile, delimiter=delimiter,
                            quotechar=quotechar, quoting=quoting)
        colnames = kwargs.get('colnames', self.colnames)
        write_colnames = kwargs.get('write_colnames',True)

        if write_colnames:
            writer.writerow(colnames)

        def none_exception(value):
            """
            returns a cleaned up value that can be used for csv export:
            - unicode text is encoded as utf-8
            - None values are replaced with the given representation (default <NULL>)
            """
            if value is None:
                return null
            elif isinstance(value, unicode):
                return value.encode('utf8')
            elif isinstance(value,Reference):
                return long(value)
            elif hasattr(value, 'isoformat'):
                return value.isoformat()[:19].replace('T', ' ')
            elif isinstance(value, (list,tuple)):
                return bar_encode(value)
            return value

        for record in self:
            row = []
            for col in colnames:
                if not REGEX_TABLE_DOT_FIELD.match(col):
                    row.append(record._extra[col])
                else:
                    (t, f) = col.split('.')
                    field = self.db[t][f]
                    if isinstance(record.get(t, None), (Row,dict)):
                        value = record[t][f]
                    else:
                        value = record[f]
                    if field.type=='blob' and not value is None:
                        value = base64.b64encode(value)
                    elif represent and field.represent:
                        value = field.represent(value)
                    row.append(none_exception(value))
            writer.writerow(row)
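    # Hedged usage sketch (comment only): write a select to a CSV file;
    # 'person' and the file name are illustrative:
    #
    #     ofile = open('people.csv', 'wb')
    #     db(db.person).select().export_to_csv_file(ofile, represent=True)
    #     ofile.close()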

    def xml(self,strict=False,row_name='row',rows_name='rows'):
        """
        serializes the table using sqlhtml.SQLTABLE (if present)
        """
        if strict:
            return '<%s>\n%s\n</%s>' % (rows_name,
                '\n'.join(row.as_xml(row_name=row_name,
                                     colnames=self.colnames) for
                          row in self), rows_name)

        import sqlhtml
        return sqlhtml.SQLTABLE(self).xml()

    def as_xml(self,row_name='row',rows_name='rows'):
        return self.xml(strict=True, row_name=row_name, rows_name=rows_name)

    def as_json(self, mode='object', default=None):
        """
        serializes the rows to a JSON list of objects (mode='object')
        or of value arrays (mode='array')
        """
        # minimal reconstruction (assumption): the original body is missing
        # in this copy; serialize via as_list and the stdlib json module,
        # with `default` handling non-JSON values such as datetimes
        import json as jsonlib
        mode = mode.lower()
        if not mode in ['object', 'array']:
            raise SyntaxError('Invalid JSON serialization mode: %s' % mode)
        items = self.as_list()
        if mode == 'array':
            items = [item.values() for item in items]
        return jsonlib.dumps(items, default=default or str)

    as_csv = __str__
    json = as_json
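    # Hedged usage sketch (comment only): the serialization helpers above are
    # interchangeable views of the same records; 'person' is illustrative:
    #
    #     rows = db(db.person).select()
    #     rows.as_json()    # '[{"id": 1, "name": "Jim"}, ...]'
    #     rows.as_csv()     # CSV text, same as str(rows)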


def test_all():
    """

    >>> if len(sys.argv)<2: db = DAL("sqlite://test.db")
    >>> if len(sys.argv)>1: db = DAL(sys.argv[1])
    >>> tmp = db.define_table('users',\
        Field('stringf', 'string', length=32, required=True),\
        Field('booleanf', 'boolean', default=False),\
        Field('passwordf', 'password', notnull=True),\
        Field('uploadf', 'upload'),\
        Field('blobf', 'blob'),\
        Field('integerf', 'integer', unique=True),\
        Field('doublef', 'double', unique=True,notnull=True),\
        Field('jsonf', 'json'),\
        Field('datef', 'date', default=datetime.date.today()),\
        Field('timef', 'time'),\
        Field('datetimef', 'datetime'),\
        migrate='test_user.table')

    Insert a record

    >>> db.users.insert(stringf='a', booleanf=True, passwordf='p', blobf='0A',\
        uploadf=None, integerf=5, doublef=3.14,\
        jsonf={"j": True},\
        datef=datetime.date(2001, 1, 1),\
        timef=datetime.time(12, 30, 15),\
        datetimef=datetime.datetime(2002, 2, 2, 12, 30, 15))
    1

    Drop the table

    >>> db.users.drop()

    Examples of insert, select, update, delete

    >>> tmp = db.define_table('person',\
        Field('name'),\
        Field('birth','date'),\
        migrate='test_person.table')
    >>> person_id = db.person.insert(name="Marco",birth='2005-06-22')
    >>> person_id = db.person.insert(name="Massimo",birth='1971-12-21')

    commented len(db().select(db.person.ALL))
    commented 2

    >>> me = db(db.person.id==person_id).select()[0] # test select
    >>> me.name
    'Massimo'
    >>> db.person[2].name
    'Massimo'
    >>> db.person(2).name
    'Massimo'
    >>> db.person(name='Massimo').name
    'Massimo'
    >>> db.person(db.person.name=='Massimo').name
    'Massimo'
    >>> row = db.person[2]
    >>> row.name == row['name'] == row['person.name'] == row('person.name')
    True
    >>> db(db.person.name=='Massimo').update(name='massimo') # test update
    1
    >>> db(db.person.name=='Marco').select().first().delete_record() # test delete
    1

    Update a single record

    >>> me.update_record(name="Max")
    <Row {'name': 'Max', 'birth': datetime.date(1971, 12, 21), 'id': 2}>
    >>> me.name
    'Max'

    Examples of complex search conditions

    >>> len(db((db.person.name=='Max')&(db.person.birth<'2003-01-01')).select())
    1
    >>> len(db((db.person.name=='Max')&(db.person.birth<datetime.date(2003, 1, 1))).select())
    1
    >>> len(db((db.person.name=='Max')|(db.person.birth<'2003-01-01')).select())
    1
    >>> me = db(db.person.id==person_id).select(db.person.name)[0]
    >>> me.name
    'Max'

    Examples of search conditions using extract from date/datetime/time

    >>> len(db(db.person.birth.month()==12).select())
    1
    >>> len(db(db.person.birth.year()>1900).select())
    1

    Example of usage of NULL

    >>> len(db(db.person.birth==None).select()) ### test NULL
    0
    >>> len(db(db.person.birth!=None).select()) ### test NULL
    1

    Examples of search conditions using lower, upper, and like

    >>> len(db(db.person.name.upper()=='MAX').select())
    1
    >>> len(db(db.person.name.like('%ax')).select())
    1
    >>> len(db(db.person.name.upper().like('%AX')).select())
    1
    >>> len(db(~db.person.name.upper().like('%AX')).select())
    0

    orderby, groupby and limitby

    >>> people = db().select(db.person.name, orderby=db.person.name)
    >>> order = db.person.name|~db.person.birth
    >>> people = db().select(db.person.name, orderby=order)

    >>> people = db().select(db.person.name, orderby=db.person.name, groupby=db.person.name)

    >>> people = db().select(db.person.name, orderby=order, limitby=(0,100))

    Example of a one-to-many relation

    >>> tmp = db.define_table('dog',\
        Field('name'),\
        Field('birth','date'),\
        Field('owner',db.person),\
        migrate='test_dog.table')
    >>> db.dog.insert(name='Snoopy', birth=None, owner=person_id)
    1

    A simple JOIN

    >>> len(db(db.dog.owner==db.person.id).select())
    1

    >>> len(db().select(db.person.ALL, db.dog.name,left=db.dog.on(db.dog.owner==db.person.id)))
    1

    Drop tables

    >>> db.dog.drop()
    >>> db.person.drop()

    Example of a many-to-many relation and Set

    >>> tmp = db.define_table('author', Field('name'),\
                              migrate='test_author.table')
    >>> tmp = db.define_table('paper', Field('title'),\
                              migrate='test_paper.table')
    >>> tmp = db.define_table('authorship',\
        Field('author_id', db.author),\
        Field('paper_id', db.paper),\
        migrate='test_authorship.table')
    >>> aid = db.author.insert(name='Massimo')
    >>> pid = db.paper.insert(title='QCD')
    >>> tmp = db.authorship.insert(author_id=aid, paper_id=pid)

    Define a Set

    >>> authored_papers = db((db.author.id==db.authorship.author_id)&(db.paper.id==db.authorship.paper_id))
    >>> rows = authored_papers.select(db.author.name, db.paper.title)
    >>> for row in rows: print row.author.name, row.paper.title
    Massimo QCD

    Example of search condition using belongs

    >>> set = (1, 2, 3)
    >>> rows = db(db.paper.id.belongs(set)).select(db.paper.ALL)
    >>> print rows[0].title
    QCD

    Example of search condition using a nested select

    >>> nested_select = db()._select(db.authorship.paper_id)
    >>> rows = db(db.paper.id.belongs(nested_select)).select(db.paper.ALL)
    >>> print rows[0].title
    QCD

    Example of expressions

    >>> mynumber = db.define_table('mynumber', Field('x', 'integer'))
    >>> db(mynumber).delete()
    0
    >>> for i in range(10): tmp = mynumber.insert(x=i)
    >>> db(mynumber).select(mynumber.x.sum())[0](mynumber.x.sum())
    45

    >>> db(mynumber.x+2==5).select(mynumber.x + 2)[0](mynumber.x + 2)
    5

    Output in csv

    >>> print str(authored_papers.select(db.author.name, db.paper.title)).strip()
    author.name,paper.title\r
    Massimo,QCD

    Delete all leftover tables

    >>> DAL.distributed_transaction_commit(db)

    >>> db.mynumber.drop()
    >>> db.authorship.drop()
    >>> db.author.drop()
    >>> db.paper.drop()
    """


# legacy aliases, kept for backward compatibility with the old SQL* names

SQLField = Field
SQLTable = Table
SQLXorable = Expression
SQLQuery = Query
SQLSet = Set
SQLRows = Rows
SQLStorage = Row
SQLDB = DAL
GQLDB = DAL
DAL.Field = Field
DAL.Table = Table


def geoPoint(x,y):
    return "POINT (%f %f)" % (x,y)

def geoLine(*line):
    return "LINESTRING (%s)" % ','.join("%f %f" % item for item in line)

def geoPolygon(*line):
    return "POLYGON ((%s))" % ','.join("%f %f" % item for item in line)
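# Hedged usage sketch (comment only): the geo* helpers build WKT literals
# for the spatial adapters, e.g.:
#
#     geoPoint(1, 2)              # 'POINT (1.000000 2.000000)'
#     geoLine((0, 0), (1, 1))     # 'LINESTRING (0.000000 0.000000,1.000000 1.000000)'
#     geoPolygon((0, 0), (1, 0))  # 'POLYGON ((0.000000 0.000000,1.000000 0.000000))'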


if __name__ == '__main__':
    import doctest
    doctest.testmod()