Package web2py :: Package gluon :: Module sql_old
[hide private]
[frames] | [no frames]

Source Code for Module web2py.gluon.sql_old

   1  #!/bin/env python 
   2  # -*- coding: utf-8 -*- 
   3   
   4  """ 
   5  This file is part of web2py Web Framework (Copyrighted, 2007-2010). 
   6  Developed by Massimo Di Pierro <mdipierro@cs.depaul.edu>. 
   7  License: GPL v2 
   8   
   9  Thanks to 
  10      * Niall Sweeny <niall.sweeny@fonjax.com> for MS SQL support 
  11      * Marcel Leuthi <mluethi@mlsystems.ch> for Oracle support 
  12      * Denes 
  13      * Chris Clark 
  14   
  15  This file contains the DAL support for many relational databases, 
  16  including SQLite, MySQL, Postgres, Oracle, MS SQL, DB2, Interbase. 
  17  Adding Ingres - clach04 
  18  """ 
  19   
  20  __all__ = ['DAL', 'Field'] 
  21   
  22  import re 
  23  import sys 
  24  import locale 
  25  import os 
  26  import time 
  27  import types 
  28  import cPickle 
  29  import datetime 
  30  import threading 
  31  import cStringIO 
  32  import csv 
  33  import copy 
  34  import socket 
  35  import logging 
  36  import copy_reg 
  37  import base64 
  38  import shutil 
  39  import marshal 
  40  import decimal 
  41  import struct 
  42   
  43  from utils import md5_hash, web2py_uuid 
  44  from serializers import json 
  45  from http import HTTP 
  46   
  47  logger = logging.getLogger("web2py.sql") 
  48   
DEFAULT = lambda:0

# internal representation of tables with field
#  <table>.<field>, tables and fields may only be [0-9a-zA-Z_]

# matches a '<table>.<field>' pair inside generated SQL
table_field = re.compile('[\w_]+\.[\w_]+')
# locates Oracle ':CLOB(...)' pseudo-literals so oracle_fix_execute can
# turn them into numbered bind variables
oracle_fix = re.compile("[^']*('[^']*'[^']*)*\:(?P<clob>CLOB\('([^']+|'')*'\))")
# parses '<table>.<field>.<uuidkey>.<name>.<ext>' — presumably upload
# file names; not used in this part of the module (TODO confirm caller)
regex_content = re.compile('(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)\.(?P<name>\w+)\.\w+$')
# quotes/whitespace/semicolons to be stripped from names (see callers)
regex_cleanup_fn = re.compile('[\'"\s;]+')
# splits on a single '|' but not on the escaped '||' (see bar_encode /
# bar_decode_string)
string_unpack=re.compile('(?<!\|)\|(?!\|)')
  59   
  60  # list of drivers will be built on the fly 
  61  # and lists only what is available 
  62  drivers = [] 
  63  try: 
  64      from pysqlite2 import dbapi2 as sqlite3 
  65      drivers.append('pysqlite2') 
  66  except: 
  67      try: 
  68          from sqlite3 import dbapi2 as sqlite3 
  69          drivers.append('SQLite3') 
  70      except: 
  71          logger.debug('no sqlite3 or pysqlite2.dbapi2 driver') 
  72   
  73  try: 
  74      import MySQLdb 
  75      drivers.append('MySQL') 
  76  except: 
  77      logger.debug('no MySQLdb driver') 
  78   
  79  try: 
  80      import psycopg2 
  81      drivers.append('PostgreSQL') 
  82  except: 
  83      logger.debug('no psycopg2 driver') 
  84   
  85  try: 
  86      import cx_Oracle 
  87      drivers.append('Oracle') 
  88  except: 
  89      logger.debug('no cx_Oracle driver') 
  90   
  91  try: 
  92      import pyodbc 
  93      drivers.append('MSSQL/DB2') 
  94  except: 
  95      logger.debug('no MSSQL/DB2 driver') 
  96   
  97  try: 
  98      import kinterbasdb 
  99      drivers.append('Interbase') 
 100  except: 
 101      logger.debug('no kinterbasdb driver') 
 102   
 103  try: 
 104      import informixdb 
 105      drivers.append('Informix') 
 106      logger.warning('Informix support is experimental') 
 107  except: 
 108      logger.debug('no informixdb driver') 
 109   
 110  try: 
 111      from com.ziclix.python.sql import zxJDBC 
 112      import java.sql 
 113      from org.sqlite import JDBC 
 114      drivers.append('zxJDBC') 
 115      logger.warning('zxJDBC support is experimental') 
 116      is_jdbc = True 
 117  except: 
 118      logger.debug('no zxJDBC driver') 
 119      is_jdbc = False 
 120   
 121  try: 
 122      import ingresdbi 
 123      drivers.append('Ingres') 
 124  except: 
 125      logger.debug('no Ingres driver') 
 126      # NOTE could try JDBC....... 
 127   
 128   
 129  import portalocker 
 130  import validators 
 131   
 132  sql_locker = threading.RLock() 
 133  thread = threading.local() 
 134   
 135  INGRES_SEQNAME='ii***lineitemsequence' # NOTE invalid database object name (ANSI-SQL wants this form of name to be a delimited identifier) 
def gen_ingres_sequencename(table_name):
    """Generate Ingres specific sequencename, pass in self._tablename
    """
    # if the generated name ever gets too long, hash and use hexhash?
    return '%s_iisq' % table_name
# mapping of the field types and some constructs
# per database
# %(...)s placeholders (length, precision, scale, field_name,
# foreign_key, on_delete_action, ...) are filled in by the DAL when it
# generates DDL and SQL for each backend.
SQL_DIALECTS = {
    'sqlite': {
        'boolean': 'CHAR(1)',
        'string': 'CHAR(%(length)s)',
        'text': 'TEXT',
        'password': 'CHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'CHAR(%(length)s)',
        'integer': 'INTEGER',
        'double': 'DOUBLE',
        'decimal': 'DOUBLE',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER PRIMARY KEY AUTOINCREMENT',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        'lower': 'LOWER(%(field)s)',
        'upper': 'UPPER(%(field)s)',
        'is null': 'IS NULL',
        'is not null': 'IS NOT NULL',
        # sqlite has no EXTRACT; web2py_extract is registered as a UDF
        # on every sqlite connection (see sqlite3_web2py_extract)
        'extract': "web2py_extract('%(name)s',%(field)s)",
        'left join': 'LEFT JOIN',
        'random': 'Random()',
        'notnull': 'NOT NULL DEFAULT %(default)s',
        'substring': 'SUBSTR(%(field)s,%(pos)s,%(length)s)',
        'primarykey': 'PRIMARY KEY (%s)'
        },
    'mysql': {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'LONGTEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'LONGBLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'double': 'DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'DATETIME',
        'id': 'INT AUTO_INCREMENT NOT NULL',
        'autoincrement': 'INT AUTO_INCREMENT NOT NULL',
        'reference': 'INT, INDEX %(field_name)s__idx (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'LONGTEXT',
        'list:string': 'LONGTEXT',
        'list:reference': 'LONGTEXT',
        'lower': 'LOWER(%(field)s)',
        'upper': 'UPPER(%(field)s)',
        'is null': 'IS NULL',
        'is not null': 'IS NOT NULL',
        'extract': 'EXTRACT(%(name)s FROM %(field)s)',
        'left join': 'LEFT JOIN',
        'random': 'RAND()',
        'notnull': 'NOT NULL DEFAULT %(default)s',
        'substring': 'SUBSTRING(%(field)s,%(pos)s,%(length)s)',
        },
    'postgres': {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'TEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BYTEA',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'SERIAL PRIMARY KEY',
        # NOTE(review): 'INTEGER DEFAULT SERIAL' is not valid postgres
        # DDL — verify whether this key is ever used for this backend
        'autoincrement': 'INTEGER DEFAULT SERIAL',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        'lower': 'LOWER(%(field)s)',
        'upper': 'UPPER(%(field)s)',
        'is null': 'IS NULL',
        'is not null': 'IS NOT NULL',
        'extract': 'EXTRACT(%(name)s FROM %(field)s)',
        'left join': 'LEFT JOIN',
        'random': 'RANDOM()',
        'notnull': 'NOT NULL DEFAULT %(default)s',
        'substring': 'SUBSTR(%(field)s,%(pos)s,%(length)s)',
        },
    'oracle': {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR2(%(length)s)',
        'text': 'CLOB',
        'password': 'VARCHAR2(%(length)s)',
        'blob': 'CLOB',
        'upload': 'VARCHAR2(%(length)s)',
        'integer': 'INT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'CHAR(8)',
        # oracle uses DATE for both date and datetime (see the ALTER
        # SESSION NLS_DATE_FORMAT statements in SQLDB.__init__)
        'datetime': 'DATE',
        'id': 'NUMBER PRIMARY KEY',
        'reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        'lower': 'LOWER(%(field)s)',
        'upper': 'UPPER(%(field)s)',
        'is null': 'IS NULL',
        'is not null': 'IS NOT NULL',
        'extract': 'EXTRACT(%(name)s FROM %(field)s)',
        'left join': 'LEFT OUTER JOIN',
        'random': 'dbms_random.value',
        'notnull': 'DEFAULT %(default)s NOT NULL',
        'substring': 'SUBSTR(%(field)s,%(pos)s,%(length)s)',
        },
    'mssql': {
        'boolean': 'BIT',
        'string': 'VARCHAR(%(length)s)',
        'text': 'TEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'IMAGE',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATETIME',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'INT IDENTITY PRIMARY KEY',
        'reference': 'INT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        'lower': 'LOWER(%(field)s)',
        'upper': 'UPPER(%(field)s)',
        'is null': 'IS NULL',
        'is not null': 'IS NOT NULL',
        'extract': 'DATEPART(%(name)s,%(field)s)',
        'left join': 'LEFT OUTER JOIN',
        'random': 'NEWID()',
        'notnull': 'NOT NULL DEFAULT %(default)s',
        'substring': 'SUBSTRING(%(field)s,%(pos)s,%(length)s)',
        'primarykey': 'PRIMARY KEY CLUSTERED (%s)'
        #' WITH( STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON)'
        },
    'mssql2': { # MS SQL unicode
        'boolean': 'CHAR(1)',
        'string': 'NVARCHAR(%(length)s)',
        'text': 'NTEXT',
        'password': 'NVARCHAR(%(length)s)',
        'blob': 'IMAGE',
        'upload': 'NVARCHAR(%(length)s)',
        'integer': 'INT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATETIME',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'INT IDENTITY PRIMARY KEY',
        'reference': 'INT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        'list:integer': 'NTEXT',
        'list:string': 'NTEXT',
        'list:reference': 'NTEXT',
        'lower': 'LOWER(%(field)s)',
        'upper': 'UPPER(%(field)s)',
        'is null': 'IS NULL',
        'is not null': 'IS NOT NULL',
        'extract': 'DATEPART(%(name)s,%(field)s)',
        'left join': 'LEFT OUTER JOIN',
        'random': 'NEWID()',
        'notnull': 'NOT NULL DEFAULT %(default)s',
        'substring': 'SUBSTRING(%(field)s,%(pos)s,%(length)s)',
        },
    'firebird': {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'BLOB SUB_TYPE 1',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB SUB_TYPE 0',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER',
        'double': 'DOUBLE PRECISION',
        'decimal': 'DECIMAL(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER PRIMARY KEY',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'BLOB SUB_TYPE 1',
        'list:string': 'BLOB SUB_TYPE 1',
        'list:reference': 'BLOB SUB_TYPE 1',
        'lower': 'LOWER(%(field)s)',
        'upper': 'UPPER(%(field)s)',
        'is null': 'IS NULL',
        'is not null': 'IS NOT NULL',
        'extract': 'EXTRACT(%(name)s FROM %(field)s)',
        'left join': 'LEFT JOIN',
        'random': 'RAND()',
        'notnull': 'DEFAULT %(default)s NOT NULL',
        'substring': 'SUBSTRING(%(field)s from %(pos)s for %(length)s)',
        },
    'informix': {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'BLOB SUB_TYPE 1',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB SUB_TYPE 0',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'SERIAL',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': 'REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s CONSTRAINT FK_%(table_name)s_%(field_name)s',
        'reference TFK': 'FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s CONSTRAINT TFK_%(table_name)s_%(field_name)s',
        'list:integer': 'BLOB SUB_TYPE 1',
        'list:string': 'BLOB SUB_TYPE 1',
        'list:reference': 'BLOB SUB_TYPE 1',
        'lower': 'LOWER(%(field)s)',
        'upper': 'UPPER(%(field)s)',
        'is null': 'IS NULL',
        'is not null': 'IS NOT NULL',
        # NOTE(review): parentheses look unbalanced here — verify
        # against the Informix EXTRACT syntax before relying on it
        'extract': 'EXTRACT(%(field)s(%(name)s)',
        'left join': 'LEFT JOIN',
        'random': 'RANDOM()',
        'notnull': 'DEFAULT %(default)s NOT NULL',
        'substring': 'SUBSTR(%(field)s,%(pos)s,%(length)s)',
        },
    'db2': {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'CLOB',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'double': 'DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
        'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        'lower': 'LOWER(%(field)s)',
        'upper': 'UPPER(%(field)s)',
        'is null': 'IS NULL',
        'is not null': 'IS NOT NULL',
        'extract': 'EXTRACT(%(name)s FROM %(field)s)',
        'left join': 'LEFT OUTER JOIN',
        'random': 'RAND()',
        'notnull': 'NOT NULL DEFAULT %(default)s',
        'substring': 'SUBSTR(%(field)s,%(pos)s,%(length)s)',
        'primarykey': 'PRIMARY KEY(%s)',
        },
    'ingres': {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'CLOB',
        'password': 'VARCHAR(%(length)s)',  ## Not sure what this contains utf8 or nvarchar. Or even bytes?
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',  ## FIXME utf8 or nvarchar... or blob? what is this type?
        'integer': 'INTEGER4', # or int8...
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'ANSIDATE',
        'time': 'TIME WITHOUT TIME ZONE',
        'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
        # ids are fed from the per-table sequence (see INGRES_SEQNAME)
        'id': 'integer4 not null unique with default next value for %s'%INGRES_SEQNAME,
        'reference': 'integer4, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', ## FIXME TODO
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        'lower': 'LOWER(%(field)s)',
        'upper': 'UPPER(%(field)s)',
        'is null': 'IS NULL',
        'is not null': 'IS NOT NULL',
        'extract': 'EXTRACT(%(name)s FROM %(field)s)', # Date/time/timestamp related. Use DatePart for older Ingres releases
        'left join': 'LEFT OUTER JOIN',
        'random': 'RANDOM()',
        'notnull': 'NOT NULL DEFAULT %(default)s',
        'substring': 'SUBSTR(%(field)s,%(pos)s,%(length)s)',
        'primarykey': 'PRIMARY KEY(%s)',
        },
    }

INGRES_USE_UNICODE_STRING_TYPES=True
if INGRES_USE_UNICODE_STRING_TYPES:
    """convert type VARCHAR -> NVARCHAR, i.e. use UCS2/UTF16 support/storage
    leaving as VARCHAR means need to use UTF8 encoding.
    Some people are very passionate about which encoding
    to use for storage, this gives the option.
    """
    for x in ['string', 'password', 'text']:
        SQL_DIALECTS['ingres'][x] = 'N' + SQL_DIALECTS['ingres'][x]
def sqlhtml_validators(field):
    """
    Field type validation, using web2py's validators mechanism.

    makes sure the content of a field is in line with the declared
    fieldtype

    Returns either a list of validators or (for reference fields with a
    _format) a single IS_IN_DB validator, possibly wrapped in
    IS_EMPTY_OR.
    """
    field_type, field_length = field.type, field.length
    if isinstance(field_type, SQLCustomType):
        # custom types may carry their own validator; otherwise fall
        # back to validating their underlying base type
        if hasattr(field_type, 'validator'):
            return field_type.validator
        else:
            field_type = field_type.type
    elif not isinstance(field_type,str):
        # unknown type object: nothing we can validate
        return []
    requires=[]
    def ff(r,id):
        # default 'represent': format row *id* of table *r* using the
        # table's _format (string template or callable), else the id
        row=r[id]
        if not row:
            return id
        elif hasattr(r, '_format') and isinstance(r._format,str):
            return r._format % row
        elif hasattr(r, '_format') and callable(r._format):
            return r._format(row)
        else:
            return id
    if field_type == 'string':
        requires.append(validators.IS_LENGTH(field_length))
    elif field_type == 'text':
        requires.append(validators.IS_LENGTH(2 ** 16))
    elif field_type == 'password':
        requires.append(validators.IS_LENGTH(field_length))
    elif field_type == 'double':
        requires.append(validators.IS_FLOAT_IN_RANGE(-1e100, 1e100))
    elif field_type == 'integer':
        requires.append(validators.IS_INT_IN_RANGE(-1e100, 1e100))
    elif field_type.startswith('decimal'):
        requires.append(validators.IS_DECIMAL_IN_RANGE(-10**10, 10**10))
    elif field_type == 'date':
        requires.append(validators.IS_DATE())
    elif field_type == 'time':
        requires.append(validators.IS_TIME())
    elif field_type == 'datetime':
        requires.append(validators.IS_DATETIME())
    elif field._db and field_type.startswith('reference') and \
            field_type.find('.') < 0 and \
            field_type[10:] in field._db.tables:
        # 'reference <table>' pointing at a table of the same db
        # (field_type[10:] strips the 'reference ' prefix)
        referenced = field._db[field_type[10:]]
        field.represent = lambda id, r=referenced, f=ff: f(r, id)
        if hasattr(referenced, '_format') and referenced._format:
            requires = validators.IS_IN_DB(field._db,referenced.id,
                                           referenced._format)
            if field.unique:
                requires._and = validators.IS_NOT_IN_DB(field._db,field)
            if field._tablename == field_type[10:]:
                # self-reference must be allowed to be empty
                return validators.IS_EMPTY_OR(requires)
            return requires
    elif field._db and field_type.startswith('list:reference') and \
            field_type.find('.') < 0 and \
            field_type[15:] in field._db.tables:
        # 'list:reference <table>' (field_type[15:] strips the prefix)
        referenced = field._db[field_type[15:]]
        def list_ref_repr(ids, r=referenced, f=ff):
            refs = r._db(r.id.belongs(ids)).select(r.id)
            return (ids and ', '.join(f(r,ref.id) for ref in refs) or '')
        field.represent = list_ref_repr
        if hasattr(referenced, '_format') and referenced._format:
            requires = validators.IS_IN_DB(field._db,referenced.id,
                                           referenced._format,multiple=True)
            if field.unique:
                requires._and = validators.IS_NOT_IN_DB(field._db,field)
            return requires
    if field.unique:
        requires.insert(0,validators.IS_NOT_IN_DB(field._db,field))
    # two-letter prefixes of types that may legitimately be empty:
    # in(teger), do(uble), da(te...), ti(me), de(cimal), bo(olean)
    sff = ['in', 'do', 'da', 'ti', 'de', 'bo']
    if field.notnull and not field_type[:2] in sff:
        requires.insert(0, validators.IS_NOT_EMPTY())
    elif not field.notnull and field_type[:2] in sff and requires:
        requires[-1] = validators.IS_EMPTY_OR(requires[-1])
    return requires
def bar_escape(item):
    """Escape a value for bar_encode by doubling any literal '|'."""
    return '||'.join(str(item).split('|'))
538
def bar_encode(items):
    """
    Serialize *items* as a |-delimited string ('|a|b|'); blank items
    are skipped and literal '|' characters are doubled.
    """
    encoded = []
    for item in items:
        if str(item).strip():
            encoded.append(str(item).replace('|', '||'))
    return '|%s|' % '|'.join(encoded)
542
def bar_decode_integer(value):
    """Inverse of bar_encode for integer lists."""
    numbers = []
    for chunk in value.split('|'):
        if chunk.strip():
            numbers.append(int(chunk))
    return numbers
545
def bar_decode_string(value):
    """
    Inverse of bar_encode for string lists: split on single '|'
    delimiters (but not on the escaped '||') and undouble the escapes.
    """
    items = []
    # same pattern as the module-level string_unpack regex
    for chunk in re.split(r'(?<!\|)\|(?!\|)', value[1:-1]):
        if chunk.strip():
            items.append(chunk.replace('||', '|'))
    return items
549
def sql_represent(obj, fieldtype, dbname, db_codec='UTF-8'):
    """
    Convert the python value *obj* into a SQL literal string for a
    column of type *fieldtype* on backend *dbname*, encoding text with
    *db_codec*.  Quoting/escaping is done here by string substitution
    (this DAL predates parameterized queries).
    """
    # callables are lazily-evaluated defaults
    if type(obj) in (types.LambdaType, types.FunctionType):
        obj = obj()
    if isinstance(fieldtype, SQLCustomType):
        return fieldtype.encoder(obj)
    if isinstance(obj, (Expression, Field)):
        return str(obj)
    # list:* types are stored bar-encoded in a text column
    if fieldtype.startswith('list:'):
        if not obj:
            obj = []
        if not isinstance(obj, (list, tuple)):
            obj = [obj]
    if isinstance(obj, (list, tuple)):
        obj = bar_encode(obj)
    if obj is None:
        return 'NULL'
    # empty string means NULL except for st(ring), te(xt), pa(ssword),
    # up(load) columns
    if obj == '' and not fieldtype[:2] in ['st', 'te', 'pa', 'up']:
        return 'NULL'
    if fieldtype == 'boolean':
        # mssql stores booleans as BIT 1/0; others as CHAR 'T'/'F'.
        # any value not starting with 'F'/'f' counts as true
        if dbname == 'mssql':
            if obj and not str(obj)[0].upper() == 'F':
                return '1'
            else:
                return '0'
        else:
            if obj and not str(obj)[0].upper() == 'F':
                return "'T'"
            else:
                return "'F'"
    if fieldtype.startswith('i'):   # integer
        return str(int(obj))
    if fieldtype.startswith('decimal'):
        return str(obj)
    elif fieldtype.startswith('r'): # reference
        if fieldtype.find('.') > 0:
            # 'reference table.field': keyed reference, keep raw repr
            return repr(obj)
        elif isinstance(obj, (Row, Reference)):
            return str(obj['id'])
        return str(int(obj))
    elif fieldtype == 'double':
        return repr(float(obj))
    if isinstance(obj, unicode):
        obj = obj.encode(db_codec)
    if fieldtype == 'blob':
        # blobs are stored base64-encoded
        obj = base64.b64encode(str(obj))
        if dbname == 'db2':
            return "BLOB('%s')" % obj
        if dbname == 'oracle':
            # rewritten into a bind variable by oracle_fix_execute
            return ":CLOB('%s')" % obj
    # FIXME: remove comment lines?
    #elif fieldtype == 'text':
    #    if dbname == 'oracle':
    #        return ":CLOB('%s')" % obj.replace("'","?") ### FIX THIS
    elif fieldtype == 'date':
        # FIXME: remove comment lines?
        # if dbname=='postgres': return "'%s'::bytea" % obj.replace("'","''")
        if isinstance(obj, (datetime.date, datetime.datetime)):
            obj = obj.isoformat()[:10]
        else:
            obj = str(obj)
        if dbname in ['oracle', 'informix']:
            return "to_date('%s', 'yyyy-mm-dd')" % obj
    elif fieldtype == 'datetime':
        if isinstance(obj, datetime.datetime):
            if dbname == 'db2':
                # db2 timestamp literal format: yyyy-mm-dd-hh.mm.ss
                return "'%s'" % obj.isoformat()[:19].replace('T','-').replace(':','.')
            else:
                obj = obj.isoformat()[:19].replace('T',' ')
        elif isinstance(obj, datetime.date):
            if dbname == 'db2':
                return "'%s'" % obj.isoformat()[:10]+'-00.00.00'
            else:
                obj = obj.isoformat()[:10]+' 00:00:00'
        else:
            obj = str(obj)
        if dbname in ['oracle', 'informix']:
            return "to_date('%s','yyyy-mm-dd hh24:mi:ss')" % obj
    elif fieldtype == 'time':
        if isinstance(obj, datetime.time):
            # NOTE(review): [:10] looks copied from the date branch; for
            # a time with microseconds this truncates to 'HH:MM:SS.f' —
            # verify whether [:8] was intended
            obj = obj.isoformat()[:10]
        else:
            obj = str(obj)
    if not isinstance(obj,str):
        obj = str(obj)
    try:
        # make sure the bytes are valid in the target codec
        obj.decode(db_codec)
    except:
        obj = obj.decode('latin1').encode(db_codec)
    if dbname == 'mssql2' and (fieldtype == 'string' or fieldtype == 'text'):
        # unicode literal prefix for the mssql unicode dialect
        return "N'%s'" % obj.replace("'", "''")
    return "'%s'" % obj.replace("'", "''")
642 643
def cleanup(text):
    """
    validates that the given text is clean: only contains [0-9a-zA-Z_]
    """
    # any character outside the allowed set makes the name invalid
    if re.search('[^0-9a-zA-Z_]', text):
        raise SyntaxError(
            'only [0-9a-zA-Z_] allowed in table and field names, received %s'
            % text)
    return text
654 655
def sqlite3_web2py_extract(lookup, s):
    """
    SQLite UDF backing the 'extract' dialect entry: pull the *lookup*
    component (year/month/...) out of an ISO 'YYYY-MM-DD HH:MM:SS'
    string *s*; returns None on any failure.
    """
    slices = {
        'year': (0, 4),
        'month': (5, 7),
        'day': (8, 10),
        'hour': (11, 13),
        'minute': (14, 16),
        'second': (17, 19),
    }
    try:
        start, stop = slices[lookup]
        return int(s[start:stop])
    except:  # bad lookup name or malformed string -> NULL
        return None
670
def oracle_fix_execute(command, execute):
    """
    Rewrite inline :CLOB('...') pseudo-literals in *command* into
    numbered bind variables (:1, :2, ...), collect the CLOB payloads as
    arguments, strip the trailing statement terminator and run the
    statement through *execute(command, args)*.
    """
    # same pattern as the module-level oracle_fix regex
    clob_pattern = re.compile(
        r"[^']*('[^']*'[^']*)*\:(?P<clob>CLOB\('([^']+|'')*'\))")
    args = []
    position = 1
    while True:
        match = clob_pattern.match(command)
        if not match:
            break
        # replace the CLOB(...) text with the bind-variable number
        command = (command[:match.start('clob')] + str(position)
                   + command[match.end('clob'):])
        # payload is between "CLOB('" and "')"; undouble escaped quotes
        args.append(match.group('clob')[6:-2].replace("''", "'"))
        position += 1
    return execute(command[:-1], args)
682 683
def autofields(db, text):
    """
    UNFINISHED: parse a 'tablename: field descriptions' text into
    (tablename, [Field, ...]).  The first statement below disables the
    whole function; everything after it is currently dead code.
    """
    raise SyntaxError, "work in progress"
    m = re.compile('(?P<i>\w+)')
    (tablename, fields) = text.lower().split(':', 1)
    tablename = tablename.replace(' ', '_')
    newfields = []
    for field in fields.split(','):
        # 'items by keys' selects the format keys for reference lookups
        if field.find(' by ') >= 0:
            (items, keys) = field.split(' by ')
        else:
            (items, keys) = (field, '%(id)s')
        items = m.findall(items)
        if not items:
            break
        keys = m.sub('%(\g<i>)s', keys)
        (requires, notnull, unique) = (None, False, False)
        # trailing modifier words, then an optional type word
        if items[-1] in ['notnull']:
            (notnull, items) = (True, items[:-1])
        if items[-1] in ['unique']:
            (unique, items) = (True, items[:-1])
        if items[-1] in ['text', 'date', 'datetime', 'time', 'blob', 'upload', 'password',
                         'integer', 'double', 'boolean', 'string']:
            (items, t) = (items[:-1], items[-1])
        elif items[-1] in db.tables:
            # last word names an existing table: make it a reference
            t = 'reference %s' % items[-1]
            requires = validators.IS_IN_DB(db, '%s.%s' % (items[-1], db.tables[items[-1]].id.name), keys)
        else:
            t = 'string'
        name = '_'.join(items)
        if unique:
            if requires:
                raise SyntaxError, "Sorry not supported"
            requires = validators.IS_NOT_IN_DB(db, '%s.%s' % (tablename, name))
        if requires and not notnull:
            requires = validators.IS_EMPTY_OR(requires)
        label = ' '.join([i.capitalize() for i in items])
        newfields.append(db.Field(name, t, label=label, requires=requires,
                         notnull=notnull, unique=unique))
    return tablename, newfields
723 724
class Row(dict):

    """
    a dictionary that lets you do d['a'] as well as d.a
    this is only used to store a Row
    """

    def __getitem__(self, key):
        # keys are always stored as strings; the optional '_extra' dict
        # holds values of extra (computed) select expressions
        key=str(key)
        if key in self.get('_extra',{}):
            return self._extra[key]
        return dict.__getitem__(self, key)

    def __setitem__(self, key, value):
        # normalize keys to str so item and attribute access agree
        dict.__setitem__(self, str(key), value)

    def __getattr__(self, key):
        # attribute access falls through to item access; note a missing
        # field raises KeyError (not AttributeError) here
        return dict.__getitem__(self,key)

    def __setattr__(self, key, value):
        dict.__setitem__(self,key,value)

    def __repr__(self):
        return '<Row ' + dict.__repr__(self) + '>'

    def __int__(self):
        # a Row converts to its record id
        return dict.__getitem__(self,'id')

    def __eq__(self,other):
        # two rows are equal when their serializable dict forms match
        try:
            return self.as_dict() == other.as_dict()
        except AttributeError:
            # *other* has no as_dict(): not comparable to a Row
            return False

    def __ne__(self,other):
        return not (self == other)

    def as_dict(self,datetime_to_str=True):
        """
        Return a plain-dict copy holding only serializable values:
        nested Rows become dicts, References become ints, Decimals
        become floats, date/time values become 'YYYY-MM-DD HH:MM:SS'
        strings when datetime_to_str is True, None is kept, and any
        other non-serializable value is dropped.
        """
        SERIALIZABLE_TYPES = (str,unicode,int,long,float,bool,list)
        d = dict(self)
        for k in copy.copy(d.keys()):
            v=d[k]
            if d[k]==None:
                continue
            elif isinstance(v,Row):
                d[k]=v.as_dict()
            elif isinstance(v,Reference):
                d[k]=int(v)
            elif isinstance(v,decimal.Decimal):
                d[k]=float(v)
            elif isinstance(v, (datetime.date, datetime.datetime, datetime.time)):
                if datetime_to_str:
                    d[k] = v.isoformat().replace('T',' ')[:19]
            elif not isinstance(v,SERIALIZABLE_TYPES):
                # value cannot be serialized: drop it
                del d[k]
        return d
781 782
def Row_unpickler(data):
    # rebuild a Row from its pickled plain-dict payload
    return Row(cPickle.loads(data))

def Row_pickler(data):
    # reduce a Row to (constructor, pickled plain dict);
    # datetime_to_str=False keeps date/time objects intact so they
    # survive a pickle round-trip
    return Row_unpickler, (cPickle.dumps(data.as_dict(datetime_to_str=False)),)

# register the reduction with copy_reg so Row instances can be pickled
# by the standard pickle machinery
copy_reg.pickle(Row, Row_pickler, Row_unpickler)
class SQLCallableList(list):
    """
    A list that can also be called like a function; calling it returns
    a shallow copy of itself (used e.g. as ``db.tables()``).
    """

    def __call__(self):
        # copy.copy preserves the SQLCallableList type of the result
        # while protecting the original from caller mutation
        return copy.copy(self)
795
class SQLDB(dict):

    """
    an instance of this class represents a database connection

    Example::

        db = SQLDB('sqlite://test.db')
        db.define_table('tablename', Field('fieldname1'),
                        Field('fieldname2'))

    """

    # per-URI lists of recycled connections, shared across threads and
    # guarded by sql_locker (see _pool_connection / close_all_instances)
    _connection_pools={}
    def _exists(self, filename):
        """
        to be used ONLY for files that on GAE may not be on filesystem

        Plain filesystem existence check; presumably replaced on GAE —
        the docstring suggests so (TODO confirm).
        """
        return os.path.exists(filename)
817
    def _open(self, filename, mode='rb', lock=True):
        """
        to be used ONLY for files that on GAE may not be on filesystem

        Opens *filename*; when ``lock`` is True a shared portalocker
        lock is taken for read modes and an exclusive one for write /
        append modes.  Caller must release via _close().
        """
        fileobj = open(filename,mode)
        if lock and mode in ('r','rb'):
            portalocker.lock(fileobj,portalocker.LOCK_SH)
        elif lock and mode in ('w','wb','a'):
            portalocker.lock(fileobj,portalocker.LOCK_EX)
        return fileobj
828
    def _close(self, fileobj, unlock=True):
        """
        to be used ONLY for files that on GAE may not be on filesystem

        Releases the portalocker lock (when ``unlock`` is True) and
        closes *fileobj*; a falsy *fileobj* is ignored.
        """
        if fileobj:
            if unlock:
                portalocker.unlock(fileobj)
            fileobj.close()
837
838 - def _delete(self, filename):
839 os.unlink(filename)
    # ## this allows gluon to set a folder for this thread

    @staticmethod
    def set_folder(folder):
        # remember the working folder in thread-local storage so each
        # request thread can use its own database folder
        thread.folder=folder
    # ## this allows gluon to commit/rollback all dbs in this thread

    @staticmethod
    def close_all_instances(action):
        """ to close cleanly databases in a multithreaded environment """
        # nothing was opened on this thread
        if not hasattr(thread,'instances'):
            return
        while thread.instances:
            instance = thread.instances.pop()
            # *action* is typically a commit or rollback callable
            action(instance)
            # ## if you want pools, recycle this connection
            really = True
            if instance._pool_size:
                sql_locker.acquire()
                pool = SQLDB._connection_pools[instance._uri]
                if len(pool) < instance._pool_size:
                    # pool has room: keep the connection for reuse
                    pool.append(instance._connection)
                    really = False
                sql_locker.release()
            if really:
                # not recycled into the pool: close it for real
                instance._connection.close()
        return
867 868 @staticmethod
869 - def distributed_transaction_begin(*instances):
870 if not instances: 871 return 872 instances = enumerate(instances) 873 for (i, db) in instances: 874 if not db._dbname in ['postgres', 'mysql', 'firebird']: 875 raise SyntaxError, 'distributed transaction only supported by postgresql, mysql, and firebird' 876 if db._dbname == 'mysql': 877 db._execute('XA START;')
878 879 @staticmethod
880 - def distributed_transaction_commit(*instances):
881 if not instances: 882 return 883 instances = enumerate(instances) 884 thread_key = '%s.%i' % (socket.gethostname(), threading.currentThread()) 885 keys = ['%s.%i' % (thread_key, i) for (i,db) in instances] 886 for (i, db) in instances: 887 if not db._dbname in ['postgres', 'mysql', 'firebird']: 888 raise SyntaxError, 'distributed transaction only supported by postgresql, mysql, and firebird' 889 try: 890 for (i, db) in instances: 891 if db._dbname == 'postgres': 892 db._execute("PREPARE TRANSACTION '%s';" % keys[i]) 893 elif db._dbname == 'mysql': 894 db._execute("XA END;") 895 db._execute("XA PREPARE;") 896 elif db._dbname == 'firebird': 897 db.prepare() 898 except: 899 for (i, db) in instances: 900 if db._dbname == 'postgres': 901 db._execute("ROLLBACK PREPARED '%s';" % keys[i]) 902 elif db._dbname == 'mysql': 903 db._execute("XA ROLLBACK;") 904 elif db._dbname == 'firebird': 905 db.rollback() 906 raise RuntimeError, 'failure to commit distributed transaction' 907 else: 908 for (i, db) in instances: 909 if db._dbname == 'postgres': 910 db._execute("COMMIT PREPARED '%s';" % keys[i]) 911 elif db._dbname == 'mysql': 912 db._execute("XA COMMIT;") 913 elif db._dbname == 'firebird': 914 db.commit() 915 return
916
    def _pool_connection(self, f):
        """
        Set ``self._connection``: reuse a pooled connection for this
        URI when pooling is enabled, otherwise call the factory *f*.
        """

        # ## deal with particular case first:

        if not self._pool_size:
            self._connection = f()
            return
        uri = self._uri
        sql_locker.acquire()
        if not uri in SQLDB._connection_pools:
            SQLDB._connection_pools[uri] = []
        if SQLDB._connection_pools[uri]:
            # an idle pooled connection is available: reuse it
            self._connection = SQLDB._connection_pools[uri].pop()
            sql_locker.release()
        else:
            # release the lock before the potentially slow connect call
            sql_locker.release()
            self._connection = f()
934
    def __init__(self, uri='sqlite://dummy.db', pool_size=0,
                 folder=None, db_codec='UTF-8', check_reserved=None,
                 migrate=True, fake_migrate=False,
                 decode_credentials=False):
        """
        Parse ``uri``, open a (possibly pooled) connection to the selected
        backend and install backend-specific cursor/execute wrappers.

        :param uri: connection string, generally of the form
            '<backend>://<user>:<passwd>@<host>:<port>/<db>'; the exact shape
            varies per backend. The special value 'None' installs a dummy
            no-op backend (used to generate SQL without a database).
        :param pool_size: size of the connection pool (0 disables pooling).
        :param folder: folder where sqlite files and migration metadata live;
            defaults to the per-thread folder.
        :param db_codec: charset the database content is assumed to use.
        :param check_reserved: list of backend names (e.g. ['common','postgres'])
            whose reserved-keyword lists identifiers are validated against,
            or None to skip the check.
        :param migrate: default ``migrate`` flag for define_table.
        :param fake_migrate: default ``fake_migrate`` flag for define_table.
        :param decode_credentials: when True, user and password parsed from
            the URI are URL-unquoted before use.
        :raises SyntaxError: if the URI cannot be parsed or names an
            unsupported backend.
        """
        # Credentials embedded in the URI may be URL-quoted; decode only
        # when the caller asks for it (backward compatible default).
        if not decode_credentials:
            credential_decoder = lambda cred: cred
        else:
            import urllib
            credential_decoder = lambda cred: urllib.unquote(cred)

        self._uri = str(uri) # NOTE: assuming it is in utf8!!!
        self._pool_size = pool_size
        self._db_codec = db_codec
        self.check_reserved = check_reserved
        if self.check_reserved:
            # Lazy import: the keyword tables are only needed when checking.
            from reserved_sql_keywords import ADAPTERS as RSK
            self.RSK = RSK
        self._migrate = migrate
        self._fake_migrate = fake_migrate
        self['_lastsql'] = ''
        self.tables = SQLCallableList()

        # Check if there is a folder for this thread else use ''

        if folder:
            self._folder = folder
        elif hasattr(thread,'folder'):
            self._folder = thread.folder
        else:
            self._folder = thread.folder = ''

        # Now connect to database.  Every branch below sets self._dbname,
        # opens a (pooled) connection plus cursor, and binds self._execute
        # to a wrapper appropriate for that driver.

        if self._uri.startswith('sqlite:memory:'):
            self._dbname = 'sqlite'
            self._pool_connection(lambda: \
                    sqlite3.Connection(':memory:',
                                       check_same_thread=False))
            # web2py_extract implements the EXTRACT() SQL helper in Python.
            self._connection.create_function('web2py_extract', 2,
                    sqlite3_web2py_extract)
            # self._connection.row_factory = sqlite3.Row
            self._cursor = self._connection.cursor()
            self._execute = lambda *a, **b: self._cursor.execute(*a, **b)
        elif not is_jdbc and self._uri.startswith('sqlite://'):
            self._dbname = 'sqlite'
            path_encoding = sys.getfilesystemencoding() or \
                locale.getdefaultlocale()[1]
            # A relative path is anchored at the per-thread folder.
            if uri[9] != '/':
                dbpath = os.path.join(
                    self._folder.decode(path_encoding).encode('utf8'),
                    uri[9:])
            else:
                dbpath = uri[9:]
            self._pool_connection(lambda : sqlite3.Connection(dbpath,
                    check_same_thread=False))
            self._connection.create_function('web2py_extract', 2,
                    sqlite3_web2py_extract)
            # self._connection.row_factory = sqlite3.Row
            self._cursor = self._connection.cursor()
            self._execute = lambda *a, **b: self._cursor.execute(*a, **b)
        elif self._uri.startswith('mysql://'):
            self._dbname = 'mysql'
            m = re.compile('^(?P<user>[^:@]+)(\:(?P<passwd>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$'
                           ).match(self._uri[8:])
            if not m:
                raise SyntaxError, \
                    "Invalid URI string in SQLDB: %s" % self._uri
            user = credential_decoder(m.group('user'))
            if not user:
                raise SyntaxError, 'User required'
            passwd = credential_decoder(m.group('passwd'))
            if not passwd:
                passwd = ''
            host = m.group('host')
            if not host:
                raise SyntaxError, 'Host name required'
            db = m.group('db')
            if not db:
                raise SyntaxError, 'Database name required'
            port = m.group('port') or '3306'

            charset = m.group('charset') or 'utf8'

            self._pool_connection(lambda : MySQLdb.Connection(
                    db=db,
                    user=user,
                    passwd=passwd,
                    host=host,
                    port=int(port),
                    charset=charset,
                    ))
            self._cursor = self._connection.cursor()
            self._execute = lambda *a, **b: self._cursor.execute(*a, **b)
            self._execute('SET FOREIGN_KEY_CHECKS=1;')
            # NO_BACKSLASH_ESCAPES keeps MySQL quoting compatible with the
            # SQL generated by this layer.
            self._execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
        elif not is_jdbc and self._uri.startswith('postgres://'):
            self._dbname = 'postgres'
            m = \
                re.compile('^(?P<user>[^:@]+)(\:(?P<passwd>[^@]*))?@(?P<host>[^\:@/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$'
                           ).match(self._uri[11:])
            if not m:
                raise SyntaxError, "Invalid URI string in SQLDB"
            user = credential_decoder(m.group('user'))
            if not user:
                raise SyntaxError, 'User required'
            passwd = credential_decoder(m.group('passwd'))
            if not passwd:
                passwd = ''
            host = m.group('host')
            if not host:
                raise SyntaxError, 'Host name required'
            db = m.group('db')
            if not db:
                raise SyntaxError, 'Database name required'
            port = m.group('port') or '5432'

            sslmode = m.group('sslmode')
            # NOTE(review): the two adjacent literals appear to concatenate
            # to "...host='%s'port=%s..." with no separating space, which
            # libpq would reject as a DSN; confirm against upstream whether
            # a trailing space was lost in extraction.
            if sslmode:
                msg = ("dbname='%s' user='%s' host='%s'"
                       "port=%s password='%s' sslmode='%s'") \
                       % (db, user, host, port, passwd, sslmode)
            else:
                msg = ("dbname='%s' user='%s' host='%s'"
                       "port=%s password='%s'") \
                       % (db, user, host, port, passwd)

            self._pool_connection(lambda : psycopg2.connect(msg))
            self._connection.set_client_encoding('UTF8')
            self._cursor = self._connection.cursor()
            self._execute = lambda *a, **b: self._cursor.execute(*a, **b)
            query = 'BEGIN;'
            self['_lastsql'] = query
            self._execute(query)
            self._execute("SET CLIENT_ENCODING TO 'UNICODE';")  # ## not completely sure but should work
            self._execute("SET standard_conforming_strings=on;")
        elif self._uri.startswith('oracle://'):
            self._dbname = 'oracle'
            self._pool_connection(lambda : \
                cx_Oracle.connect(self._uri[9:],threaded=True))
            self._cursor = self._connection.cursor()
            # oracle_fix_execute rewrites :CLOB('...') bind placeholders.
            self._execute = lambda a: \
                oracle_fix_execute(a,self._cursor.execute)
            ### becase Oracle uses date for both date and datetime
            self._execute("ALTER SESSION set NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
            # self._execute("ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD';")
            self._execute("ALTER SESSION SET NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
        elif self._uri.startswith('mssql://') or self._uri.startswith('mssql2://'):

            # ## read: http://bytes.com/groups/python/460325-cx_oracle-utf8

            # mssql2:// differs only in that queries are sent as unicode.
            if self._uri.startswith('mssql://'):
                skip = 8
                self._dbname = 'mssql'
            elif self._uri.startswith('mssql2://'):
                skip = 9
                self._dbname = 'mssql2'
            if '@' not in self._uri[skip:]:
                # No credentials: the remainder of the URI is an ODBC DSN name.
                try:
                    m = re.compile('^(?P<dsn>.+)$'
                                   ).match(self._uri[skip:])
                    if not m:
                        raise SyntaxError, \
                            'Parsing uri string(%s) has no result' % (self._uri[skip:])
                    dsn = m.group('dsn')
                    if not dsn:
                        raise SyntaxError, 'DSN required'
                except SyntaxError, e:
                    logger.error('NdGpatch error')
                    raise e
                cnxn = 'DSN=%s' % dsn
            else:
                m = \
                    re.compile('^(?P<user>[^:@]+)(\:(?P<passwd>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?(?P<urlargs>.*))?$'
                               ).match(self._uri[skip:])
                if not m:
                    raise SyntaxError, \
                        "Invalid URI string in SQLDB: %s" % self._uri
                user = credential_decoder(m.group('user'))
                if not user:
                    raise SyntaxError, 'User required'
                passwd = credential_decoder(m.group('passwd'))
                if not passwd:
                    passwd = ''
                host = m.group('host')
                if not host:
                    raise SyntaxError, 'Host name required'
                db = m.group('db')
                if not db:
                    raise SyntaxError, 'Database name required'
                port = m.group('port') or '1433'

                # Parse the optional url name-value arg pairs after the '?'
                # (in the form of arg1=value1&arg2=value2&...)
                # Default values (drivers like FreeTDS insist on uppercase parameter keys)
                argsdict = { 'DRIVER':'{SQL Server}' }

                urlargs = m.group('urlargs') or ''
                argpattern = re.compile('(?P<argkey>[^=]+)=(?P<argvalue>[^&]*)')
                for argmatch in argpattern.finditer(urlargs):
                    argsdict[str(argmatch.group('argkey')).upper()] = argmatch.group('argvalue')
                urlargs = ';'.join(['%s=%s' % (ak, av) for (ak, av) in argsdict.items()])

                cnxn = \
                    'SERVER=%s;PORT=%s;DATABASE=%s;UID=%s;PWD=%s;%s' \
                    % (host, port, db, user, passwd, urlargs)
            self._pool_connection(lambda : pyodbc.connect(cnxn))
            self._cursor = self._connection.cursor()
            if self._uri.startswith('mssql://'):
                self._execute = lambda *a, **b: self._cursor.execute(*a, **b)
            elif self._uri.startswith('mssql2://'):
                self._execute = lambda a: \
                    self._cursor.execute(unicode(a, 'utf8'))
        elif self._uri.startswith('firebird://'):
            self._dbname = 'firebird'
            m = re.compile('^(?P<user>[^:@]+)(\:(?P<passwd>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+?)(\?set_encoding=(?P<charset>\w+))?$').match(self._uri[11:])
            if not m:
                raise SyntaxError, \
                    "Invalid URI string in SQLDB: %s" % self._uri
            user = credential_decoder(m.group('user'))
            if not user:
                raise SyntaxError, 'User required'
            passwd = credential_decoder(m.group('passwd'))
            if not passwd:
                passwd = ''
            host = m.group('host')
            if not host:
                raise SyntaxError, 'Host name required'
            db = m.group('db')
            if not db:
                raise SyntaxError, 'Database name required'
            port = m.group('port') or '3050'

            charset = m.group('charset') or 'UTF8'

            self._pool_connection(lambda : \
                kinterbasdb.connect(dsn='%s/%s:%s' % (host, port, db),
                                    user=user,
                                    password=passwd,
                                    charset=charset))
            self._cursor = self._connection.cursor()
            self._execute = lambda *a, **b: self._cursor.execute(*a, **b)
        elif self._uri.startswith('firebird_embedded://'):
            self._dbname = 'firebird'
            m = re.compile('^(?P<user>[^:@]+)(\:(?P<passwd>[^@]*))?@(?P<path>[^\?]+)(\?set_encoding=(?P<charset>\w+))?$').match(self._uri[20:])
            if not m:
                raise SyntaxError, \
                    "Invalid URI string in SQLDB: %s" % self._uri
            user = credential_decoder(m.group('user'))
            if not user:
                raise SyntaxError, 'User required'
            passwd = credential_decoder(m.group('passwd'))
            if not passwd:
                passwd = ''
            pathdb = m.group('path')
            if not pathdb:
                raise SyntaxError, 'Path required'
            charset = m.group('charset')
            if not charset:
                charset = 'UTF8'
            # Embedded server: empty host, database addressed by file path.
            self._pool_connection(lambda : \
                kinterbasdb.connect(host='',
                                    database=pathdb,
                                    user=user,
                                    password=passwd,
                                    charset=charset))
            self._cursor = self._connection.cursor()
            self._execute = lambda *a, **b: self._cursor.execute(*a, **b)
        elif self._uri.startswith('informix://'):
            self._dbname = 'informix'
            m = \
                re.compile('^(?P<user>[^:@]+)(\:(?P<passwd>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$'
                           ).match(self._uri[11:])
            if not m:
                raise SyntaxError, \
                    "Invalid URI string in SQLDB: %s" % self._uri
            user = credential_decoder(m.group('user'))
            if not user:
                raise SyntaxError, 'User required'
            passwd = credential_decoder(m.group('passwd'))
            if not passwd:
                passwd = ''
            host = m.group('host')
            if not host:
                raise SyntaxError, 'Host name required'
            db = m.group('db')
            if not db:
                raise SyntaxError, 'Database name required'
            # NOTE(review): port is parsed but never used by the connect call
            # below, and 3050 is Firebird's default rather than Informix's.
            port = m.group('port') or '3050'

            self._pool_connection(lambda : informixdb.connect('%s@%s'
                                  % (db, host), user=user,
                                  password=passwd, autocommit=False))
            self._cursor = self._connection.cursor()
            # Informix driver rejects the trailing ';' web2py generates.
            self._execute = lambda a: self._cursor.execute(a[:-1])
        elif self._uri.startswith('db2:'):
            # Everything after 'db2:' is passed verbatim as an ODBC string.
            self._dbname, cnxn = self._uri.split(':', 1)
            self._pool_connection(lambda : pyodbc.connect(cnxn))
            self._cursor = self._connection.cursor()
            self._execute = lambda a: self._cursor.execute(a[:-1])
        elif is_jdbc and self._uri.startswith('sqlite://'):
            self._dbname='sqlite'
            if uri[9] != '/':
                dbpath = os.path.join(self._folder, uri[9:])
            else:
                dbpath = uri[9:]
            # dbpath is bound as a default arg so the lambda captures the
            # current value, not the variable.
            self._pool_connection(lambda dbpath=dbpath: zxJDBC.connect(java.sql.DriverManager.getConnection('jdbc:sqlite:'+dbpath)))
            self._cursor = self._connection.cursor()
            self._execute = lambda a: self._cursor.execute(a[:-1])
        elif is_jdbc and self._uri.startswith('postgres://'):
            self._dbname = 'postgres'
            m = \
                re.compile('^(?P<user>[^:@]+)(\:(?P<passwd>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$'
                           ).match(self._uri[11:])
            if not m:
                raise SyntaxError, "Invalid URI string in SQLDB"
            user = credential_decoder(m.group('user'))
            if not user:
                raise SyntaxError, 'User required'
            passwd = credential_decoder(m.group('passwd'))
            if not passwd:
                passwd = ''
            host = m.group('host')
            if not host:
                raise SyntaxError, 'Host name required'
            db = m.group('db')
            if not db:
                raise SyntaxError, 'Database name required'
            port = m.group('port') or '5432'

            msg = \
                "dbname='%s' user='%s' host='%s' port=%s password='%s'"\
                % (db, user, host, port, passwd)
            params = ('jdbc:postgresql://%s:%s/%s' % (host, port, db),
                      user,passwd)
            self._pool_connection(lambda params=params:zxJDBC.connect(*params))
            self._connection.set_client_encoding('UTF8')
            self._cursor = self._connection.cursor()
            self._execute = lambda *a, **b: self._cursor.execute(*a, **b)
            query = 'BEGIN;'
            self['_lastsql'] = query
            self._execute(query)
            self._execute("SET CLIENT_ENCODING TO 'UNICODE';")  # ## not completely sure but should work
            self._execute("SET standard_conforming_strings=on;")
        elif self._uri.startswith('ingres:'):
            """Currently only one URI form supported:

                ingres://LOCAL_DATABASE_NAME

            NOTE may also use: "ingres:LOCAL_DATABASE_NAME"
            and avoid the slashes "/".
            """
            self._dbname, connstr = self._uri.split(':', 1)
            # Simple URI processing
            connstr = connstr.lstrip()
            while connstr.startswith('/'):
                connstr = connstr[1:]

            database_name=connstr # Assume only (local) dbname is passed in
            vnode = '(local)'
            servertype = 'ingres'
            trace = (0, None) # No tracing
            self._pool_connection(lambda : \
                ingresdbi.connect(
                    database=database_name,
                    vnode=vnode,
                    servertype=servertype,
                    trace=trace))
            self._cursor = self._connection.cursor()
            self._execute = lambda *a, **b: self._cursor.execute(*a, **b)
        elif self._uri == 'None':

            # Dummy backend: swallows every call; lets the DAL generate SQL
            # without any real database behind it.
            class Dummy:

                lastrowid = 1

                def __getattr__(self, value):
                    return lambda *a, **b: ''

            self._dbname = 'sqlite'
            self._connection = Dummy()
            self._cursor = Dummy()
            self._execute = lambda a: []
        else:
            raise SyntaxError, \
                'database type not supported: %s' % self._uri
        # Dialect table used to render types/SQL for the selected backend.
        self._translator = SQL_DIALECTS[self._dbname]

        # ## register this instance of SQLDB
        if not hasattr(thread,'instances'):
            thread.instances = []
        thread.instances.append(self)
1329 - def check_reserved_keyword(self, name):
1330 """ 1331 Validates ``name`` against SQL keywords 1332 Uses self.check_reserve which is a list of 1333 operators to use. 1334 self.check_reserved 1335 ['common', 'postgres', 'mysql'] 1336 self.check_reserved 1337 ['all'] 1338 """ 1339 for backend in self.check_reserved: 1340 if name.upper() in self.RSK[backend]: 1341 raise SyntaxError, 'invalid table/column name "%s" is a "%s" reserved SQL keyword' % (name, backend.upper())
1342 1343
    def define_table(
        self,
        tablename,
        *fields,
        **args
        ):
        """
        Define (and, when migrating, physically create/alter) a table.

        :param tablename: name of the table; cleaned up and checked against
            reserved keywords when ``check_reserved`` is enabled.
        :param fields: Field instances (or Tables whose fields are copied).
        :param args: only 'migrate', 'primarykey', 'fake_migrate', 'format',
            'trigger_name' and 'sequence_name' are accepted.
        :returns: the new Table (or KeyedTable when 'primarykey' is given).
        :raises SyntaxError: on unknown keyword args, duplicate or invalid
            table names.
        """
        for key in args:
            if key not in [
                'migrate',
                'primarykey',
                'fake_migrate',
                'format',
                'trigger_name',
                'sequence_name']:
                raise SyntaxError, 'invalid table "%s" attribute: %s' % (tablename, key)
        migrate = args.get('migrate', self._migrate)
        fake_migrate = args.get('fake_migrate', self._fake_migrate)
        format = args.get('format', None)
        trigger_name = args.get('trigger_name', None)
        sequence_name = args.get('sequence_name', None)
        tablename = cleanup(tablename)

        #default values (for compatibility with previous version)
        # Firebird/Oracle emulate autoincrement ids via trigger + sequence.
        if trigger_name is None:
            if self._dbname == 'firebird':
                trigger_name = 'trg_id_%s' % tablename
            if self._dbname == 'oracle':
                trigger_name = '%s_trigger' % tablename
            if self._dbname == 'ingres':
                pass

        #default (for compatibility with previous version)
        if sequence_name is None:
            if self._dbname == 'firebird':
                sequence_name = 'genid_%s' % tablename
            if self._dbname == 'oracle':
                sequence_name = '%s_sequence' % tablename
            if self._dbname == 'ingres':
                pass
            if self._dbname == 'postgres':
                sequence_name = '%s_id_Seq' % tablename

        if tablename in self.tables or hasattr(self,tablename):
            raise SyntaxError, 'table already defined: %s' % tablename
        if tablename.startswith('_'):
            raise SyntaxError, 'invalid table name: %s' % tablename
        if self.check_reserved:
            self.check_reserved_keyword(tablename)

        if 'primarykey' in args:
            t = self[tablename] = KeyedTable(self, tablename, *fields,
                **dict(primarykey=args['primarykey'], trigger_name=trigger_name, sequence_name=sequence_name))
        else:
            t = self[tablename] = Table(self, tablename, *fields,
                **dict(trigger_name=trigger_name, sequence_name=sequence_name))
        # db magic
        if self._uri == 'None':
            # Dummy backend: no physical table, no migration bookkeeping.
            return t

        t._create_references()

        if migrate:
            # Serialize DDL across threads; migrations are not reentrant.
            try:
                sql_locker.acquire()
                t._create(migrate=migrate, fake_migrate=fake_migrate)
            finally:
                sql_locker.release()
        else:
            t._dbt = None
        self.tables.append(tablename)
        t._format = format
        return t
1417
1418 - def __iter__(self):
1419 for tablename in self.tables: 1420 yield self[tablename]
1421
1422 - def __getitem__(self, key):
1423 return dict.__getitem__(self, str(key))
1424
1425 - def __setitem__(self, key, value):
1426 dict.__setitem__(self, str(key), value)
1427
1428 - def __getattr__(self, key):
1429 return dict.__getitem__(self,key)
1430
1431 - def __setattr__(self, key, value):
1432 if key in self: 1433 raise SyntaxError, \ 1434 'Object %s exists and cannot be redefined' % key 1435 self[key] = value
1436
1437 - def __repr__(self):
1438 return '<SQLDB ' + dict.__repr__(self) + '>'
1439
1440 - def __call__(self, where=None):
1441 if isinstance(where,Table): 1442 where = where._id>0 1443 elif isinstance(where,Field): 1444 where = where!=None 1445 return Set(self, where)
1446
1447 - def prepare(self):
1448 self._connection.prepare()
1449
1450 - def commit(self):
1451 self._connection.commit()
1452
1453 - def rollback(self):
1454 self._connection.rollback()
1455
1456 - def executesql(self, query, placeholders=None, as_dict=False):
1457 """ 1458 placeholders is optional and will always be None when using DAL 1459 if using raw SQL with placeholders, placeholders may be 1460 a sequence of values to be substituted in 1461 or, *if supported by the DB driver*, a dictionary with keys 1462 matching named placeholders in your SQL. 1463 1464 Added 2009-12-05 "as_dict" optional argument. Will always be 1465 None when using DAL. If using raw SQL can be set to True 1466 and the results cursor returned by the DB driver will be 1467 converted to a sequence of dictionaries keyed with the db 1468 field names. Tested with SQLite but should work with any database 1469 since the cursor.description used to get field names is part of the 1470 Python dbi 2.0 specs. Results returned with as_dict = True are 1471 the same as those returned when applying .to_list() to a DAL query. 1472 1473 [{field1: value1, field2: value2}, {field1: value1b, field2: value2b}] 1474 1475 --bmeredyk 1476 """ 1477 self['_lastsql'] = query 1478 if placeholders: 1479 self['_lastsql'] +=" with "+str(placeholders) 1480 self._execute(query, placeholders) 1481 else: 1482 self._execute(query) 1483 if as_dict: 1484 if not hasattr(self._cursor,'description'): 1485 raise RuntimeError, "database does not support executesql(...,as_dict=True)" 1486 # Non-DAL legacy db query, converts cursor results to dict. 1487 # sequence of 7-item sequences. each sequence tells about a column. 1488 # first item is always the field name according to Python Database API specs 1489 columns = self._cursor.description 1490 # reduce the column info down to just the field names 1491 fields = [f[0] for f in columns] 1492 # will hold our finished resultset in a list 1493 data = self._cursor.fetchall() 1494 # convert the list for each row into a dictionary so it's 1495 # easier to work with. 
row['field_name'] rather than row[0] 1496 return [dict(zip(fields,row)) for row in data] 1497 # see if any results returned from database 1498 try: 1499 return self._cursor.fetchall() 1500 except: 1501 return None
1502
1503 - def _update_referenced_by(self, other):
1504 for tablename in self.tables: 1505 by = self[tablename]._referenced_by 1506 by[:] = [item for item in by if not item[0] == other]
1507
1508 - def export_to_csv_file(self, ofile, *args, **kwargs):
1509 for table in self.tables: 1510 ofile.write('TABLE %s\r\n' % table) 1511 self(self[table]._id > 0).select().export_to_csv_file(ofile, *args, **kwargs) 1512 ofile.write('\r\n\r\n') 1513 ofile.write('END')
1514
1515 - def import_from_csv_file(self, ifile, id_map={}, null='<NULL>', unique='uuid', *args, **kwargs):
1516 for line in ifile: 1517 line = line.strip() 1518 if not line: 1519 continue 1520 elif line == 'END': 1521 return 1522 elif not line.startswith('TABLE ') or not line[6:] in self.tables: 1523 raise SyntaxError, 'invalid file format' 1524 else: 1525 tablename = line[6:] 1526 self[tablename].import_from_csv_file(ifile, id_map, null, unique, *args, **kwargs)
1527
class SQLALL(object):
    """
    Helper that renders as the comma-separated list of all field names of a
    table, each prefixed by the table name and a dot
    ('<table>.<f1>, <table>.<f2>, ...').

    Normally only instantiated from within gluon.sql.
    """

    def __init__(self, table):
        self.table = table

    def __str__(self):
        prefix = self.table._tablename
        return ', '.join('%s.%s' % (prefix, name)
                         for name in self.table.fields)
1543 1544
class SQLJoin(object):
    """
    Helper pairing a table with its join condition; renders as
    '<table> ON <query>'.

    Normally only called from gluon.sql.
    """

    def __init__(self, table, query):
        self.table = table
        self.query = query

    def __str__(self):
        return ' ON '.join(['%s' % self.table, '%s' % self.query])
1558 1559
1560 -class Reference(int):
1561
1562 - def __allocate(self):
1563 if not self._record: 1564 self._record = self._table[int(self)] 1565 if not self._record: 1566 raise RuntimeError, "Using a recursive select but encountered a broken reference"
1567
1568 - def __getattr__(self,key):
1569 if key == 'id': 1570 return int(self) 1571 self.__allocate() 1572 return self._record.get(key, None)
1573
1574 - def __setattr__(self,key,value):
1575 if key[:1] == '_': 1576 int.__setattr__(self, key, value) 1577 return 1578 self.__allocate() 1579 self._record[key] = value
1580
1581 - def __getitem__(self,key):
1582 if key == 'id': 1583 return int(self) 1584 self.__allocate() 1585 return self._record.get(key, None)
1586
1587 - def __setitem__(self, key, value):
1588 self.__allocate() 1589 self._record[key] = value
1590
def Reference_unpickler(data):
    """Rebuild a pickled Reference id: the payload is a marshalled int."""
    value = marshal.loads(data)
    return value
1593
def Reference_pickler(data):
    """Pickle support for Reference: reduce to (unpickler, (marshalled_int,)).

    Falls back to hand-packing the marshal int format ('i' plus a
    little-endian 32-bit value) when marshal.dumps raises AttributeError.
    """
    value = int(data)
    try:
        payload = marshal.dumps(value)
    except AttributeError:
        payload = 'i%s' % struct.pack('<i', value)
    return (Reference_unpickler, (payload,))

copy_reg.pickle(Reference, Reference_pickler, Reference_unpickler)
class Table(dict):

    """
    An instance of this class represents a database table.

    Fields are stored as dict entries and exposed as attributes; an 'id'
    field is always present (added automatically if not supplied).

    Example::

        db = SQLDB(...)
        db.define_table('users', Field('name'))
        db.users.insert(name='me') # print db.users._insert(...) to see SQL
        db.users.drop()
    """
    def __init__(
        self,
        db,
        tablename,
        *fields,
        **args):
        """
        Initializes the table and performs checking on the provided fields.

        Each table will have automatically an 'id'.

        If a field is of type Table, the fields (excluding 'id') from that table
        will be used instead.

        :param db: the owning SQLDB instance (may be None).
        :param tablename: name of this table.
        :param fields: Field instances and/or Tables to borrow fields from.
        :param args: optional 'trigger_name' and 'sequence_name'.
        :raises SyntaxError: when a supplied field is of incorrect type.
        """
        # Ensure slot 0 is always the primary key; a user-supplied 'id'
        # field replaces the default one.
        new_fields = [ Field('id', 'id') ]
        for field in fields:
            # A field already bound to another db is copied, not shared.
            if hasattr(field, '_db'):
                field = copy.copy(field)
            if isinstance(field, Field):
                if field.type == 'id':
                    # Keep this alias for the primary key.
                    new_fields[0] = field
                else:
                    new_fields.append(field)
            elif isinstance(field, Table):
                # Borrow all fields but the other table's 'id'.
                new_fields += [copy.copy(field[f]) for f in
                               field.fields[1:]]
            else:
                raise SyntaxError, \
                    'define_table argument is not a Field: %s' % field
        fields = new_fields
        self._db = db
        self._id = fields[0]
        self._tablename = tablename
        self._trigger_name = args.get('trigger_name', None)
        self._sequence_name = args.get('sequence_name', None)
        self.fields = SQLCallableList()
        self.virtualfields = []
        fields = list(fields)

        for field in fields:
            if db and db.check_reserved:
                db.check_reserved_keyword(field.name)
            self.fields.append(field.name)
            self[field.name] = field
            if field.type == 'id':
                self['id'] = field
            # Bind the field back to this table/db.
            field._tablename = self._tablename
            field._table = self
            field._db = self._db
            if field.requires == DEFAULT:
                field.requires = sqlhtml_validators(field)
        self.ALL = SQLALL(self)
1671
1672 - def _create_references(self):
1673 self._referenced_by = [] 1674 for fieldname in self.fields: 1675 field=self[fieldname] 1676 if isinstance(field.type,str) and field.type.startswith('reference'): 1677 referenced = field.type[10:].strip() 1678 if not referenced: 1679 raise SyntaxError, 'Table: reference to nothing: %s' % referenced 1680 if not referenced in self._db: 1681 raise SyntaxError, 'Table: table "%s" does not exist' % referenced 1682 referee = self._db[referenced] 1683 if self._tablename in referee.fields: 1684 raise SyntaxError, 'Field: table %s has same name as a field in referenced table %s' % (self._tablename, referee._tablename) 1685 referee._referenced_by.append((self._tablename, field.name))
1686
1687 - def _filter_fields(self, record, id=False):
1688 return dict([(k, v) for (k, v) in record.items() if k 1689 in self.fields and (k != 'id' or id)])
1690
1691 - def __getitem__(self, key):
1692 if not key: 1693 return None 1694 elif str(key).isdigit(): 1695 return self._db(self.id == key).select(limitby=(0, 1)).first() 1696 else: 1697 return dict.__getitem__(self, str(key))
1698
1699 - def __call__(self, id=DEFAULT, **kwargs):
1700 if id!=DEFAULT: 1701 if isinstance(id, Query): 1702 record = self._db(id).select(limitby=(0, 1)).first() 1703 elif not str(id).isdigit(): 1704 record = None 1705 else: 1706 record = self._db(self._id == id).select(limitby=(0, 1)).first() 1707 if record: 1708 for k,v in kwargs.items(): 1709 if record[k] != v: return None 1710 return record 1711 elif kwargs: 1712 query = reduce(lambda a, b:a & b, [self[k] == v for k, v in kwargs.items()]) 1713 return self._db(query).select(limitby=(0, 1)).first() 1714 else: 1715 return None
1716
1717 - def __setitem__(self, key, value):
1718 if str(key).isdigit(): 1719 if key == 0: 1720 self.insert(**self._filter_fields(value)) 1721 elif not self._db(self._id == key)\ 1722 .update(**self._filter_fields(value)): 1723 raise SyntaxError, 'No such record: %s' % key 1724 else: 1725 dict.__setitem__(self, str(key), value)
1726
1727 - def __delitem__(self, key):
1728 if not str(key).isdigit() or not self._db(self._id == key).delete(): 1729 raise SyntaxError, 'No such record: %s' % key
1730
1731 - def __getattr__(self, key):
1732 return dict.__getitem__(self,key)
1733
1734 - def __setattr__(self, key, value):
1735 if key in self: 1736 raise SyntaxError, 'Object exists and cannot be redefined: %s' % key 1737 dict.__setitem__(self, key, value)
1738
1739 - def __iter__(self):
1740 for fieldname in self.fields: 1741 yield self[fieldname]
1742
1743 - def __repr__(self):
1744 return '<Table ' + dict.__repr__(self) + '>'
1745
1746 - def __str__(self):
1747 if self.get('_ot', None): 1748 return '%s AS %s' % (self._ot, self._tablename) 1749 return self._tablename
1750
1751 - def with_alias(self, alias):
1752 other = copy.copy(self) 1753 other['_ot'] = other._tablename 1754 other['ALL'] = SQLALL(other) 1755 other['_tablename'] = alias 1756 for fieldname in other.fields: 1757 other[fieldname] = copy.copy(other[fieldname]) 1758 other[fieldname]._tablename = alias 1759 self._db[alias] = self 1760 return other
1761
    def _create(self, migrate=True, fake_migrate=False):
        """
        Build the CREATE TABLE statement for this table and, when migrating,
        execute it (or reconcile against a previously saved field layout).

        :param migrate: True to create/migrate; a string names the .table
            metadata file explicitly; False returns the SQL without running it.
        :param fake_migrate: record the migration as done without executing
            any DDL.
        :returns: the CREATE TABLE SQL string.
        :raises SyntaxError: on an unknown field type.
        :raises RuntimeError: when the saved .table metadata file is corrupted.
        """
        fields = []
        # sql_fields: rendered type per field, used to detect migrations;
        # sql_fields_aux: same but with defaults folded in, used for CREATE.
        sql_fields = {}
        sql_fields_aux = {}
        for k in self.fields:
            field = self[k]
            if isinstance(field.type, SQLCustomType):
                ftype = field.type.native or field.type.type
            elif field.type.startswith('reference'):
                referenced = field.type[10:].strip()
                constraint_name = '%s_%s__constraint' % (self._tablename, field.name)
                # Oracle limits identifiers to 30 characters.
                if self._db._dbname == 'oracle' and len(constraint_name) > 30:
                    constraint_name = '%s_%s__constraint' % (self._tablename[:10], field.name[:7])
                ftype = self._db._translator[field.type[:9]]\
                    % dict(table_name=self._tablename,
                           field_name=field.name,
                           constraint_name=constraint_name,
                           foreign_key=referenced + ('(%s)' % self._db[referenced].fields[0]),
                           on_delete_action=field.ondelete)
            elif field.type.startswith('list:reference'):
                ftype = self._db._translator[field.type[:14]]
            elif field.type.startswith('decimal'):
                # e.g. 'decimal(10,2)' -> precision 10, scale 2.
                precision, scale = [int(x) for x in field.type[8:-1].split(',')]
                ftype = self._db._translator[field.type[:7]] % \
                    dict(precision=precision, scale=scale)
            elif not field.type in self._db._translator:
                raise SyntaxError, 'Field: unknown field type: %s for %s' % \
                    (field.type, field.name)
            else:
                ftype = self._db._translator[field.type]\
                     % dict(length=field.length)
            if not isinstance(field.type, SQLCustomType) and \
                    not field.type.startswith('id') and \
                    not field.type.startswith('reference'):
                if field.notnull:
                    ftype += ' NOT NULL'
                if field.unique:
                    ftype += ' UNIQUE'

            # add to list of fields
            sql_fields[field.name] = ftype

            if field.default!=None:
                # caveat: sql_fields and sql_fields_aux differ for default values
                # sql_fields is used to trigger migrations and sql_fields_aux
                # are used for create table
                # the reason is that we do not want to trigger a migration simply
                # because a default value changes
                ftype = ftype.replace('NOT NULL',
                        self._db._translator['notnull']
                         % dict(default=sql_represent(field.default,
                         field.type, self._db._dbname, self._db._db_codec)))

            sql_fields_aux[field.name] = ftype
            fields.append('%s %s' % (field.name, ftype))
        other = ';'

        # backend-specific extensions to fields
        if self._db._dbname == 'mysql':
            fields.append('PRIMARY KEY(%s)' % self.fields[0])
            other = ' ENGINE=InnoDB CHARACTER SET utf8;'

        fields = ',\n    '.join(fields)
        query = '''CREATE TABLE %s(\n    %s\n)%s''' % \
            (self._tablename, fields, other)

        # Migration metadata lives next to the sqlite file, or in the
        # db folder for every other backend.
        if self._db._uri.startswith('sqlite:///'):
            path_encoding = sys.getfilesystemencoding() or \
                locale.getdefaultlocale()[1]
            dbpath = self._db._uri[9:self._db._uri.rfind('/')]\
                .decode('utf8').encode(path_encoding)
        else:
            dbpath = self._db._folder
        if not migrate:
            return query
        elif self._db._uri.startswith('sqlite:memory:'):
            self._dbt = None
        elif isinstance(migrate, str):
            self._dbt = os.path.join(dbpath, migrate)
        else:
            self._dbt = os.path.join(dbpath, '%s_%s.table' \
                     % (md5_hash(self._db._uri), self._tablename))
        if self._dbt:
            self._logfilename = os.path.join(dbpath, 'sql.log')
            logfile = self._db._open(self._logfilename, 'a')
        else:
            logfile = None
        if not self._dbt or not self._db._exists(self._dbt):
            # First time: create the table (plus autoincrement helpers).
            if self._dbt:
                logfile.write('timestamp: %s\n'
                               % datetime.datetime.today().isoformat())
                logfile.write(query + '\n')
            self._db['_lastsql'] = query
            if self._db._dbname == 'ingres':
                # pre-create table auto inc code (if needed)
                tmp_seqname=gen_ingres_sequencename(self._tablename)
                query=query.replace(INGRES_SEQNAME, tmp_seqname)
                self._db._execute('create sequence %s' % tmp_seqname)
            if not fake_migrate:
                self._db._execute(query)
                if self._db._dbname in ['oracle']:
                    # Oracle: emulate autoincrement via sequence + trigger.
                    t = self._tablename
                    self._db._execute('CREATE SEQUENCE %s START WITH 1 INCREMENT BY 1 NOMAXVALUE;'
                                       % self._sequence_name)
                    self._db._execute('CREATE OR REPLACE TRIGGER %s BEFORE INSERT ON %s FOR EACH ROW BEGIN SELECT %s.nextval INTO :NEW.id FROM DUAL; END;\n'
                                       % (self._trigger_name, t, self._sequence_name))
                elif self._db._dbname == 'firebird':
                    # Firebird: emulate autoincrement via generator + trigger.
                    t = self._tablename
                    self._db._execute('create generator %s;' % self._sequence_name)
                    self._db._execute('set generator %s to 0;' % self._sequence_name)
                    self._db._execute('create trigger %s for %s active before insert position 0 as\nbegin\nif(new.id is null) then\nbegin\nnew.id = gen_id(%s, 1);\nend\nend;'
                                       % (self._trigger_name, t, self._sequence_name))
                elif self._db._dbname == 'ingres':
                    # post create table auto inc code (if needed)
                    # modify table to btree for performance....
                    # Older Ingres releases could use rule/trigger like Oracle above.
                    modify_tbl_sql = 'modify %s to btree unique on %s' % (self._tablename, 'id') # hard coded id column
                    self._db._execute(modify_tbl_sql)
            self._db.commit()
            if self._dbt:
                # Save the rendered field layout for future migrations.
                tfile = self._db._open(self._dbt, 'w')
                cPickle.dump(sql_fields, tfile)
                self._db._close(tfile)
                if fake_migrate:
                    logfile.write('faked!\n')
                else:
                    logfile.write('success!\n')
        else:
            # Table exists: compare saved layout and migrate differences.
            tfile = self._db._open(self._dbt, 'r')
            try:
                sql_fields_old = cPickle.load(tfile)
            except EOFError:
                self._db._close(tfile)
                self._db._close(logfile)
                raise RuntimeError, 'File %s appears corrupted' % self._dbt
            self._db._close(tfile)
            if sql_fields != sql_fields_old:
                self._migrate(sql_fields, sql_fields_old,
                              sql_fields_aux, logfile,
                              fake_migrate=fake_migrate)
        self._db._close(logfile)
        return query
1904
def _migrate(
    self,
    sql_fields,
    sql_fields_old,
    sql_fields_aux,
    logfile,
    fake_migrate=False,
    ):
    """
    Diff the desired schema against the previously recorded one and emit
    ALTER TABLE statements to reconcile them.

    :param sql_fields: dict fieldname -> SQL type string (desired schema)
    :param sql_fields_old: dict fieldname -> SQL type string (schema recorded
        in the .table file at last migration)
    :param sql_fields_aux: like sql_fields but with DEFAULT clauses substituted
        for NOT NULL (used for the actual ALTER statements)
    :param logfile: open file object where every statement is logged
    :param fake_migrate: if True, log the statements but do not execute them
    """
    ### make sure all field names are lower case to avoid conflicts
    sql_fields = dict((k.lower(), v) for k, v in sql_fields.items())
    sql_fields_old = dict((k.lower(), v) for k, v in sql_fields_old.items())
    sql_fields_aux = dict((k.lower(), v) for k, v in sql_fields_aux.items())

    # union of old and new field names, preserving new-schema order first
    keys = sql_fields.keys()
    for key in sql_fields_old:
        if not key in keys:
            keys.append(key)
    # mssql cannot chain ', ADD col ...' inside one ALTER TABLE; it needs a
    # fresh statement per added column
    if self._db._dbname == 'mssql':
        new_add = '; ALTER TABLE %s ADD ' % self._tablename
    else:
        new_add = ', ADD '

    fields_changed = False
    sql_fields_current = copy.copy(sql_fields_old)
    for key in keys:
        if not key in sql_fields_old:
            # new field: plain ADD COLUMN
            sql_fields_current[key] = sql_fields[key]
            query = ['ALTER TABLE %s ADD %s %s;' % \
                     (self._tablename, key, sql_fields_aux[key].replace(', ', new_add))]
        elif self._db._dbname == 'sqlite':
            # sqlite cannot DROP or modify columns; leave existing ones alone
            query = None
        elif not key in sql_fields:
            # field removed from the model: drop it
            del sql_fields_current[key]
            if not self._db._dbname in ('firebird',):
                query = ['ALTER TABLE %s DROP COLUMN %s;' % (self._tablename, key)]
            else:
                query = ['ALTER TABLE %s DROP %s;' % (self._tablename, key)]
        elif sql_fields[key] != sql_fields_old[key] \
              and not isinstance(self[key].type, SQLCustomType) \
              and not (self[key].type.startswith('reference') and \
              sql_fields[key].startswith('INT,') and \
              sql_fields_old[key].startswith('INT NOT NULL,')):
            # type changed: recreate the column via a __tmp copy so that
            # existing data survives the ADD/UPDATE/DROP/ADD/UPDATE/DROP dance
            sql_fields_current[key] = sql_fields[key]
            t = self._tablename
            tt = sql_fields_aux[key].replace(', ', new_add)
            if not self._db._dbname in ('firebird',):
                query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
                         'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
                         'ALTER TABLE %s DROP COLUMN %s;' % (t, key),
                         'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                         'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
                         'ALTER TABLE %s DROP COLUMN %s__tmp;' % (t, key)]
            else:
                # firebird spells it DROP, not DROP COLUMN
                query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
                         'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
                         'ALTER TABLE %s DROP %s;' % (t, key),
                         'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                         'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
                         'ALTER TABLE %s DROP %s__tmp;' % (t, key)]
        else:
            # unchanged (or an intentionally-ignored reference NOT NULL delta)
            query = None

        if query:
            fields_changed = True
            logfile.write('timestamp: %s\n'
                           % datetime.datetime.today().isoformat())
            self._db['_lastsql'] = '\n'.join(query)
            for sub_query in query:
                logfile.write(sub_query + '\n')
                if not fake_migrate:
                    self._db._execute(sub_query)
                    # caveat. mysql, oracle and firebird do not allow multiple alter table
                    # in one transaction so we must commit partial transactions and
                    # update self._dbt after alter table.
                    if self._db._dbname in ['mysql', 'oracle', 'firebird']:
                        self._db.commit()
                        tfile = self._db._open(self._dbt, 'w')
                        cPickle.dump(sql_fields_current, tfile)
                        self._db._close(tfile)
                        logfile.write('success!\n')
                else:
                    logfile.write('faked!\n')

    # backends that did not commit per-statement above commit once at the end
    if fields_changed and not self._db._dbname in ['mysql', 'oracle', 'firebird']:
        self._db.commit()
        tfile = self._db._open(self._dbt, 'w')
        cPickle.dump(sql_fields_current, tfile)
        self._db._close(tfile)
1993
def create(self):
    """No-op kept for backward compatibility; tables are created by define_table."""
    return None
1997
1998 - def _drop(self, mode = None):
1999 t = self._tablename 2000 c = mode or '' 2001 if self._db._dbname in ['oracle']: 2002 return ['DROP TABLE %s %s;' % (t, c), 'DROP SEQUENCE %s;' 2003 % t, self._sequence_name] 2004 elif self._db._dbname == 'firebird': 2005 return ['DROP TABLE %s %s;' % (t, c), 'DROP GENERATOR %s;' 2006 % t, self._sequence_name] 2007 elif self._db._dbname == 'mysql': 2008 # breaks db integrity but without this mysql does not drop table 2009 return ['SET FOREIGN_KEY_CHECKS=0;','DROP TABLE %s;' % t,'SET FOREIGN_KEY_CHECKS=1;'] 2010 return ['DROP TABLE %s;' % t]
2011
def drop(self, mode = None):
    """
    Drop this table from the database and unregister it from the DAL:
    executes the statements built by _drop(), commits, removes the table
    from the db instance and deletes the migration (.table) file.
    """
    logfile = self._db._open(self._logfilename, 'a') if self._dbt else None
    statements = self._drop(mode = mode)
    self._db['_lastsql'] = '\n'.join(statements)
    for statement in statements:
        if logfile:
            logfile.write(statement + '\n')
        self._db._execute(statement)
    self._db.commit()
    # forget the table on the DAL side as well
    del self._db[self._tablename]
    self._db.tables.remove(self._tablename)
    self._db._update_referenced_by(self._tablename)
    if self._dbt:
        self._db._delete(self._dbt)
        logfile.write('success!\n')
2028
def _insert(self, **fields):
    """
    Build (but do not execute) the INSERT statement for this table.

    For each declared field (skipping 'id'): use the passed value if given,
    else the field default, else its compute() callback; raise SyntaxError
    for unknown field names or a missing required field.

    :returns: the 'INSERT INTO t(...) VALUES (...);' SQL string
    :raises SyntaxError: on unknown field names or missing required fields
    """
    (fs, vs) = ([], [])
    invalid_fieldnames = [key for key in fields if not key in self.fields]
    if invalid_fieldnames:
        raise SyntaxError, 'invalid field names: %s' \
            % repr(invalid_fieldnames)
    for fieldname in self.fields:
        if fieldname == 'id':
            continue
        field = self[fieldname]
        (ft, fd) = (field.type, field._db._dbname)
        if fieldname in fields:
            fs.append(fieldname)
            value = fields[fieldname]
            if hasattr(value, 'id'):
                # a record/Reference was passed; store its id
                value = value.id
            elif ft == 'string' and isinstance(value, (str, unicode)):
                # silently truncate over-long strings to the field length
                value = value[:field.length]
            vs.append(sql_represent(value, ft, fd, self._db._db_codec))
        elif field.default != None:
            fs.append(fieldname)
            vs.append(sql_represent(field.default, ft, fd, self._db._db_codec))
        elif field.compute != None:
            # computed fields see the passed values wrapped in a Row
            fs.append(fieldname)
            vs.append(sql_represent(field.compute(Row(fields)), ft, fd, self._db._db_codec))
        elif field.required is True:
            raise SyntaxError, 'Table: missing required field: %s' % field
    sql_f = ', '.join(fs)
    sql_v = ', '.join(vs)
    sql_t = self._tablename
    return 'INSERT INTO %s(%s) VALUES (%s);' % (sql_t, sql_f, sql_v)
2060
def bulk_insert(self, *items):
    """Insert each dict in items via insert(); exists for GAE compatibility."""
    inserted = []
    for item in items:
        inserted.append(self.insert(**item))
    return inserted
2064
def insert(self, **fields):
    """
    Execute the INSERT built by _insert() and fetch the new record id,
    using the appropriate backend-specific mechanism (lastrowid, currval,
    last_insert_id(), SCOPE_IDENTITY(), gen_id, sqlerrd, ...).

    :returns: a Reference to the new record when the backend yields an
        integer id, otherwise whatever raw id value the backend produced
        (possibly None)
    """
    query = self._insert(**fields)
    self._db['_lastsql'] = query
    self._db._execute(query)
    if self._db._dbname == 'sqlite':
        id = self._db._cursor.lastrowid
    elif self._db._dbname == 'postgres':
        id = self._db._execute("select currval('%s')" % self._sequence_name)
        id = int(self._db._cursor.fetchone()[0])
    elif self._db._dbname == 'mysql':
        self._db._execute('select last_insert_id();')
        id = int(self._db._cursor.fetchone()[0])
    elif self._db._dbname in ['oracle']:
        self._db._execute('SELECT %s.currval FROM dual;'
                           % self._sequence_name)
        id = int(self._db._cursor.fetchone()[0])
    elif self._db._dbname == 'mssql' or self._db._dbname\
         == 'mssql2':
        #self._db._execute('SELECT @@IDENTITY;')
        self._db._execute('SELECT SCOPE_IDENTITY();')
        id = int(self._db._cursor.fetchone()[0])
    elif self._db._dbname == 'firebird':
        # gen_id(seq, 0) reads the generator without incrementing it
        self._db._execute('SELECT gen_id(%s, 0) FROM rdb$database'
                           % self._sequence_name)
        id = int(self._db._cursor.fetchone()[0])
    elif self._db._dbname == 'informix':
        id = self._db._cursor.sqlerrd[1]
    elif self._db._dbname == 'db2':
        self._db._execute('SELECT DISTINCT IDENTITY_VAL_LOCAL() FROM %s;' % self._tablename)
        id = int(self._db._cursor.fetchone()[0])
    elif self._db._dbname == 'ingres':
        tmp_seqname = gen_ingres_sequencename(self._tablename)
        self._db._execute('select current value for %s' % tmp_seqname)
        id = int(self._db._cursor.fetchone()[0]) # don't really need int type cast here...
    else:
        id = None
    if not isinstance(id, int):
        return id
    # wrap integer ids in a lazy Reference bound to this table
    rid = Reference(id)
    (rid._table, rid._record) = (self, None)
    return rid
2106
def import_from_csv_file(
    self,
    csvfile,
    id_map=None,
    null='<NULL>',
    unique='uuid',
    *args, **kwargs
    ):
    """
    import records from csv file. Column headers must have same names as
    table fields. field 'id' is ignored. If column names read 'table.file'
    the 'table.' prefix is ignored.
    'unique' argument is a field which must be unique
    (typically a uuid field)

    :param csvfile: open file-like object readable by csv.reader
    :param id_map: optional dict used to remap imported ids; reference
        fields are translated through it and new ids are recorded in it
    :param null: string that marks a NULL value in the csv
    :param unique: field name used to detect duplicates (update vs insert)

    Extra keyword arguments 'delimiter', 'quotechar' and 'quoting' are
    forwarded to csv.reader.
    """

    delimiter = kwargs.get('delimiter', ',')
    quotechar = kwargs.get('quotechar', '"')
    quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)

    reader = csv.reader(csvfile, delimiter=delimiter, quotechar=quotechar, quoting=quoting)
    colnames = None
    # NOTE(review): id_map_self is only bound when id_map is a dict, but the
    # loop below only guards with 'if id_map' — a truthy non-dict id_map
    # would raise NameError; confirm callers always pass a dict or None.
    if isinstance(id_map, dict):
        if not self._tablename in id_map:
            id_map[self._tablename] = {}
        id_map_self = id_map[self._tablename]

    def fix(field, value, id_map):
        # translate one csv cell into a (fieldname, python value) pair
        if value == null:
            value = None
        elif not isinstance(field.type, SQLCustomType) and field.type.startswith('list:string'):
            value = bar_decode_string(value)
        elif not isinstance(field.type, SQLCustomType) and field.type.startswith('list:'):
            value = bar_decode_integer(value)
        elif id_map and not isinstance(field.type, SQLCustomType) and field.type.startswith('reference'):
            # remap foreign keys through the ids recorded for the parent table
            try:
                value = id_map[field.type[9:].strip()][value]
            except KeyError:
                pass
        return (field.name, value)

    def is_id(colname):
        # True when the column maps to this table's 'id'-type field
        if colname in self:
            return self[colname].type == 'id'
        else:
            return False

    for line in reader:
        if not line:
            break
        if not colnames:
            # first row: header; strip any 'table.' prefixes and locate
            # the id column (cid) and the unique column (unique_idx)
            colnames = [x.split('.', 1)[-1] for x in line][:len(line)]
            cols, cid = [], []
            for i, colname in enumerate(colnames):
                if is_id(colname):
                    cid = i
                else:
                    cols.append(i)
                if colname == unique:
                    unique_idx = i
        else:
            items = [fix(self[colnames[i]], line[i], id_map) for i in cols]
            # Validation. Check for duplicate of 'unique' &,
            # if present, update instead of insert.
            if not unique or unique not in colnames:
                new_id = self.insert(**dict(items))
            else:
                unique_value = line[unique_idx]
                query = self._db[self][unique] == unique_value
                record = self._db(query).select().first()
                if record:
                    record.update_record(**dict(items))
                    new_id = record[self._id.name]
                else:
                    new_id = self.insert(**dict(items))
            if id_map and cid != []:
                # remember old-id -> new-id so later tables can remap refs
                id_map_self[line[cid]] = new_id
def on(self, query):
    """Express the ON condition of a join: db.table.on(condition) -> SQLJoin."""
    join = SQLJoin(self, query)
    return join
2187
2188 - def _truncate(self, mode = None):
2189 t = self._tablename 2190 c = mode or '' 2191 if self._db._dbname == 'sqlite': 2192 return ['DELETE FROM %s;' % t, 2193 "DELETE FROM sqlite_sequence WHERE name='%s';" % t] 2194 elif self._db._dbname == 'firebird': 2195 return ['DELETE FROM %s;' % t, 2196 'SET GENERATOR %s TO 0;' % self._sequence_name] 2197 else: 2198 return ['TRUNCATE TABLE %s %s;' % (t, c)]
2199
def truncate(self, mode = None):
    """Execute the statements built by _truncate(), logging them when migrations are on."""
    logfile = self._db._open(self._logfilename, 'a') if self._dbt else None
    statements = self._truncate(mode = mode)
    self._db['_lastsql'] = '\n'.join(statements)
    for statement in statements:
        if logfile:
            logfile.write(statement + '\n')
        self._db._execute(statement)
    self._db.commit()
    if logfile:
        logfile.write('success!\n')
2212 2213 2214 # added by Denes Lengyel (2009)
# added by Denes Lengyel (2009)
class KeyedTable(Table):

    """
    an instance of this class represents a database keyed table

    Example::

        db = DAL(...)
        db.define_table('account',
            Field('accnum','integer'),
            Field('acctype'),
            Field('accdesc'),
            primarykey=['accnum','acctype'])
        db.users.insert(accnum=1000,acctype='A',accdesc='Assets')
        db.users.drop()

        db.define_table('subacct',
            Field('sanum','integer'),
            Field('refnum','reference account.accnum'),
            Field('reftype','reference account.acctype'),
            Field('sadesc','string'),
            primarykey=['sanum'])

    Notes:
    1) primarykey is a list of the field names that make up the primary key
    2) all primarykey fields will have NOT NULL set even if not specified
    3) references are to other keyed tables only
    4) references must use tablename.fieldname format, as shown above
    5) update_record function is not available

    """

    def __init__(
        self,
        db,
        tablename,
        *fields,
        **args
        ):
        """
        Initializes the table and performs checking on the provided fields.

        If a field is of type Table, the fields (excluding 'id') from that table
        will be used instead.

        :raises SyntaxError: when a supplied field is of incorrect type,
            when an unknown keyword argument is passed, or when primarykey
            is not a list of this table's field names.
        """

        self._trigger_name = args.get('trigger_name', None)
        self._sequence_name = args.get('sequence_name', None)

        # the only keyword arguments accepted besides the two above is
        # 'primarykey', which must be a list of field names
        for k, v in args.iteritems():
            if k in ['trigger_name', 'sequence_name']:
                continue
            if k != 'primarykey':
                raise SyntaxError, 'invalid table "%s" attribute: %s' % (tablename, k)
            elif isinstance(v, list):
                self._primarykey = v
            else:
                raise SyntaxError, 'primarykey must be a list of fields from table "%s" ' % tablename

        new_fields = []

        for field in fields:
            if hasattr(field, '_db'):
                # field already belongs to another table: work on a copy
                field = copy.copy(field)
            if isinstance(field, Field):
                new_fields.append(field)
            elif isinstance(field, Table):
                # expand another table's fields (except its 'id')
                new_fields += [copy.copy(field[f]) for f in
                               field.fields if f != 'id']
            else:
                raise SyntaxError, \
                    'define_table argument is not a Field: %s' % field
        fields = new_fields
        self._db = db
        self._tablename = tablename
        self.fields = SQLCallableList()
        self.virtualfields = []
        fields = list(fields)

        # bind every field to this table and attach default validators
        for field in fields:
            self.fields.append(field.name)
            self[field.name] = field
            field._tablename = self._tablename
            field._table = self
            field._db = self._db
            if field.requires == DEFAULT:
                field.requires = sqlhtml_validators(field)
        self.ALL = SQLALL(self)

        # primary key fields must exist and are implicitly NOT NULL
        for k in self._primarykey:
            if k not in self.fields:
                raise SyntaxError,\
                    'primarykey must be a list of fields from table "%s" ' %\
                    tablename
            else:
                self[k].notnull = True

    # KeyedTable
    def _create_references(self):
        """
        Validate every 'reference t.f' field and register this table in the
        referenced table's _referenced_by list.

        :raises SyntaxError: for malformed references, unknown tables/fields,
            references to non-keyed tables, or name clashes.
        """
        self._referenced_by = []
        for fieldname in self.fields:
            field = self[fieldname]
            if isinstance(field.type, str) and field.type.startswith('reference'):
                ref = field.type[10:].strip()
                refs = ref.split('.')
                if not ref:
                    raise SyntaxError, 'Table: reference to nothing: %s' % ref
                if len(refs) != 2:
                    # keyed tables require the tablename.fieldname form
                    raise SyntaxError, 'invalid reference: %s' % ref
                rtablename, rfieldname = refs
                if not rtablename in self._db.tables:
                    raise SyntaxError,\
                        'Table: table "%s" does not exist' % rtablename
                rtable = self._db[rtablename]
                if not isinstance(rtable, KeyedTable):
                    raise SyntaxError,\
                        'keyed tables can only reference other keyed tables (for now)'
                if self._tablename in rtable.fields:
                    raise SyntaxError,\
                        'Field: table %s has same name as a field in referenced table "%s"' %\
                        (self._tablename, rtablename)
                if rfieldname not in rtable.fields:
                    raise SyntaxError,\
                        "invalid field '%s' for referenced table '%s' in table '%s'" % (rfieldname, rtablename, self._tablename)
                rtable._referenced_by.append((self._tablename, field.name))

    # KeyedTable
    def _build_query(self, key):
        """
        AND together equality conditions for a dict of primary-key values.

        :param key: dict mapping primary-key field names to values
        :raises SyntaxError: if a key is not part of the primary key
        """
        query = None
        for k, v in key.iteritems():
            if k in self._primarykey:
                if query:
                    query = query & (self[k] == v)
                else:
                    query = (self[k] == v)
            else:
                raise SyntaxError,\
                    'Field %s is not part of the primary key of %s'%\
                    (k, self._tablename)
        return query

    # KeyedTable ok
    def __getitem__(self, key):
        """table[{pk: value, ...}] fetches a row by primary key; any other
        key falls back to plain attribute/dict lookup."""
        if not key:
            return None
        if isinstance(key, dict):
            query = self._build_query(key)
            rows = self._db(query).select()
            if rows:
                return rows[0]
            return None
        else:
            return dict.__getitem__(self, str(key))

    # KeyedTable ok
    def __setitem__(self, key, value):
        """table[{pk: ...}] = {field: ...} upserts a row; non-dict keys fall
        back to plain dict assignment (used internally to store fields)."""
        # ??? handle special case where primarykey has all fields ???
        if isinstance(key, dict) and isinstance(value, dict):
            if set(key.keys()) == set(self._primarykey):
                value = self._filter_fields(value)
                # insert first; on failure (duplicate key) update instead
                kv = {}
                kv.update(value)
                kv.update(key)
                if not self.insert(**kv):
                    query = self._build_query(key)
                    self._db(query).update(**self._filter_fields(value))
            else:
                raise SyntaxError,\
                    'key must have all fields from primary key: %s'%\
                    (self._primarykey)
        else:
            if isinstance(key, dict):
                raise SyntaxError,\
                    'value must be a dictionary: %s'%value
#                'key must be a dictionary with primary key fields: %s'%\
#                self._primarykey
            dict.__setitem__(self, str(key), value)

    # KeyedTable
    def __delitem__(self, key):
        """del table[{pk: value, ...}] deletes the matching row."""
        if isinstance(key, dict):
            query = self._build_query(key)
            if not self._db(query).delete():
                raise SyntaxError, 'No such record: %s' % key
#        else:
#            raise SyntaxError,\
#                'key must have all fields from primary key: %s'%\
#                (self._primarykey)
        else:
            raise SyntaxError,\
                'key must be a dictionary with primary key fields: %s'%\
                self._primarykey
#            if not str(key).isdigit() or not self._db(self.id == key).delete():
#                raise SyntaxError, 'No such record: %s' % key

    # KeyedTable
    def __repr__(self):
        return '<KeyedTable ' + dict.__repr__(self) + '>'

    # KeyedTable
    def _create(self, migrate=True, fake_migrate=False):
        """
        Build and (unless migrate is False) execute the CREATE TABLE
        statement for this keyed table, handling composite primary keys,
        column- and table-level foreign keys, backend-specific sequence /
        trigger creation, and the .table migration file.

        :param migrate: False -> only return the SQL; a str names the
            .table file explicitly; True derives it from the uri hash
        :param fake_migrate: log but do not execute the DDL
        :returns: the CREATE TABLE SQL string
        """
        fields = []
        sql_fields = {}
        sql_fields_aux = {}
        TFK = {} # table level FK
        for k in self.fields:
            field = self[k]
            if isinstance(field.type, SQLCustomType):
                ftype = field.type.native or field.type.type
            elif field.type.startswith('reference'):
                ref = field.type[10:].strip()
                constraint_name = '%s_%s__constraint' % (self._tablename, field.name)
                if self._db._dbname == 'oracle' and len(constraint_name) > 30:
                    # oracle identifiers are limited to 30 characters
                    constraint_name = '%s_%s__constraint' % (self._tablename[:10], field.name[:7])
                rtablename, rfieldname = ref.split('.')
                rtable = self._db[rtablename]
                rfield = rtable[rfieldname]
                # must be PK reference or unique
                if rfieldname in rtable._primarykey or rfield.unique:
                    ftype = self._db._translator[rfield.type[:9]] % dict(length=rfield.length)
                    # multicolumn primary key reference?
                    if not rfield.unique and len(rtable._primarykey) > 1:
                        # then it has to be a table level FK
                        if rtablename not in TFK:
                            TFK[rtablename] = {}
                        TFK[rtablename][rfieldname] = field.name
                    else:
                        ftype = ftype + \
                            self._db._translator['reference FK'] % dict(\
                            constraint_name=constraint_name,
                            table_name=self._tablename,
                            field_name=field.name,
                            foreign_key='%s (%s)' % (rtablename, rfieldname),
                            on_delete_action=field.ondelete)
                else:
                    raise SyntaxError,\
                        'primary key or unique field required in reference %s' % ref

            elif not field.type in self._db._translator:
                raise SyntaxError, 'Field: unknown field type: %s for %s' % \
                    (field.type, field.name)
            else:
                ftype = self._db._translator[field.type]\
                     % dict(length=field.length)
            if not field.type.startswith('id') and not field.type.startswith('reference'):
                if field.notnull:
                    ftype += ' NOT NULL'
                if field.unique:
                    ftype += ' UNIQUE'

            # add to list of fields
            sql_fields[field.name] = ftype

            if field.default:
                # sql_fields_aux carries DEFAULT in place of NOT NULL; it is
                # used when adding columns during migrations
                sql_fields_aux[field.name] = ftype.replace('NOT NULL',
                        self._db._translator['notnull']
                         % dict(default=sql_represent(field.default,
                        field.type, self._db._dbname, self._db._db_codec)))
            else:
                sql_fields_aux[field.name] = ftype

            fields.append('%s %s' % (field.name, ftype))
        other = ';'

        # backend-specific extensions to fields
        if self._db._dbname == 'mysql':
            other = ' ENGINE=InnoDB CHARACTER SET utf8;'

        fields = ',\n    '.join(fields)

        # append table-level (composite) foreign key clauses
        for rtablename in TFK:
            rfields = TFK[rtablename]
            pkeys = self._db[rtablename]._primarykey
            fkeys = [rfields[k] for k in pkeys]
            fields = fields + ',\n    ' + \
                 self._db._translator['reference TFK'] %\
                 dict(table_name=self._tablename,
                 field_name=', '.join(fkeys),
                 foreign_table=rtablename,
                 foreign_key=', '.join(pkeys),
                 on_delete_action=field.ondelete)

        if self._primarykey:
            query = '''CREATE TABLE %s(\n    %s,\n    %s) %s''' % \
                (self._tablename, fields, self._db._translator['primarykey'] % ', '.join(self._primarykey), other)
        else:
            query = '''CREATE TABLE %s(\n    %s\n)%s''' % \
                (self._tablename, fields, other)
        if self._db._uri.startswith('sqlite:///'):
            # sqlite file paths must be encoded with the filesystem encoding
            path_encoding = sys.getfilesystemencoding() or \
                locale.getdefaultlocale()[1]
            dbpath = self._db._uri[9:self._db._uri.rfind('/')]\
                .decode('utf8').encode(path_encoding)
        else:
            dbpath = self._db._folder
        if not migrate:
            return query
        elif self._db._uri.startswith('sqlite:memory:'):
            self._dbt = None
        elif isinstance(migrate, str):
            self._dbt = os.path.join(dbpath, migrate)
        else:
            self._dbt = os.path.join(dbpath, '%s_%s.table' \
                 % (md5_hash(self._db._uri), self._tablename))
        if self._dbt:
            self._logfilename = os.path.join(dbpath, 'sql.log')
            logfile = self._db._open(self._logfilename, 'a')
        else:
            logfile = None
        if not self._dbt or not self._db._exists(self._dbt):
            # table has never been created (no .table file): create it now
            if self._dbt:
                logfile.write('timestamp: %s\n'
                               % datetime.datetime.today().isoformat())
                logfile.write(query + '\n')
            self._db['_lastsql'] = query
            if self._db._dbname == 'ingres':
                # pre-create table auto inc code (if needed)
                # keyed table already handled
                pass
            if not fake_migrate:
                self._db._execute(query)
                if self._db._dbname in ['oracle']:
                    t = self._tablename
                    self._db._execute('CREATE SEQUENCE %s START WITH 1 INCREMENT BY 1 NOMAXVALUE;'
                                       % self._sequence_name)
                    self._db._execute('CREATE OR REPLACE TRIGGER %s BEFORE INSERT ON %s FOR EACH ROW BEGIN SELECT %s.nextval INTO :NEW.id FROM DUAL; END;\n'
                                       % (self._trigger_name, t, self._sequence_name))
                elif self._db._dbname == 'firebird':
                    t = self._tablename
                    self._db._execute('create generator %s;' % self._sequence_name)
                    self._db._execute('set generator %s to 0;' % self._sequence_name)
                    self._db._execute('create trigger %s for %s active before insert position 0 as\nbegin\nif(new.id is null) then\nbegin\nnew.id = gen_id(%s, 1);\nend\nend;'
                                       % (self._trigger_name, t, self._sequence_name))
                elif self._db._dbname == 'ingres':
                    # post create table auto inc code (if needed)
                    # modify table to btree for performance.... NOT sure if this will be faster or not.
                    modify_tbl_sql = 'modify %s to btree unique on %s' % (self._tablename, ', '.join(['"%s"' % x for x in self._primarykey])) # could use same code for Table (with id column, if _primarykey is defined as ['id']
                    self._db._execute(modify_tbl_sql)
                self._db.commit()
                if self._dbt:
                    # record the created schema in the .table file
                    tfile = self._db._open(self._dbt, 'w')
                    cPickle.dump(sql_fields, tfile)
                    self._db._close(tfile)
            if self._dbt:
                if fake_migrate:
                    logfile.write('faked!\n')
                else:
                    logfile.write('success!\n')
        else:
            # .table file exists: compare schemas and migrate if needed
            tfile = self._db._open(self._dbt, 'r')
            sql_fields_old = cPickle.load(tfile)
            self._db._close(tfile)
            if sql_fields != sql_fields_old:
                self._migrate(sql_fields, sql_fields_old,
                              sql_fields_aux, logfile)

        return query

    # KeyedTable
    def insert(self, **fields):
        """
        Insert a record; on backends without cursor-based id retrieval the
        primary-key values themselves identify the new row.

        :returns: a dict of the primary-key values on mssql/mssql2/db2/
            ingres/informix (or None on an integrity error, i.e. duplicate
            key); otherwise whatever Table.insert returns
        """
        if self._db._dbname in ['mssql', 'mssql2', 'db2', 'ingres', 'informix']:
            query = self._insert(**fields)
            self._db['_lastsql'] = query
            try:
                self._db._execute(query)
            except Exception, e:
                # an IntegrityError means the key already exists: signal it
                # with None instead of raising (drivers are optional imports)
                if 'ingresdbi' in globals() and isinstance(e, ingresdbi.IntegrityError):
                    return None
                if 'pyodbc' in globals() and isinstance(e, pyodbc.IntegrityError):
                    return None
                if 'informixdb' in globals() and isinstance(e, informixdb.IntegrityError):
                    return None
                raise e
            return dict([(k, fields[k]) for k in self._primarykey])
        else:
            return Table.insert(self, **fields)
2594 2595
class Expression(object):
    """
    Represents a fragment of raw SQL with an optional web2py type, db
    binding and alias.  str(expression) yields the SQL text; the
    overloaded operators below combine expressions into Query objects
    (comparisons) or larger expressions (arithmetic, sort order).
    """

    def __init__(
        self,
        name,
        type='string',
        db=None,
        alias=None,
        ):
        """
        :param name: the SQL text of the expression
        :param type: web2py field type of its value (default 'string')
        :param db: the DAL instance the expression is bound to (or None)
        :param alias: AS-name previously assigned by with_alias(), if any
        """
        (self.name, self.type, self._db, self.alias) = (name, type, db, alias)

    def __str__(self):
        return self.name

    def __or__(self, other):  # for use in sortby
        """Join two expressions with ', ' to build ORDER BY lists."""
        return Expression(str(self) + ', ' + str(other), None, None)

    def __invert__(self):
        """Toggle a trailing ' DESC' (descending sort); ~~e is e again."""
        if str(self)[-5:] == ' DESC':
            return Expression(str(self)[:-5], None, None)
        else:
            return Expression(str(self) + ' DESC', None, None)

    # comparison operators, for use in Query

    def __eq__(self, value):
        return Query(self, '=', value)

    def __ne__(self, value):
        return Query(self, '<>', value)

    def __lt__(self, value):
        return Query(self, '<', value)

    def __le__(self, value):
        return Query(self, '<=', value)

    def __gt__(self, value):
        return Query(self, '>', value)

    def __ge__(self, value):
        return Query(self, '>=', value)

    def like(self, value):
        return Query(self, ' LIKE ', value)

    def belongs(self, value):
        return Query(self, ' IN ', value)

    def startswith(self, value):
        """LIKE 'value%'; only valid on string/text expressions."""
        if self.type in ('string', 'text'):
            return Query(self, ' LIKE ', '%s%%' % value)
        else:
            raise RuntimeError("startswith used with incompatible field type")

    def endswith(self, value):
        """LIKE '%value'; only valid on string/text expressions."""
        if self.type in ('string', 'text'):
            return Query(self, ' LIKE ', '%%%s' % value)
        else:
            raise RuntimeError("endswith used with incompatible field type")

    def contains(self, value):
        """
        LIKE '%value%' on string/text expressions; on list: types, match
        the bar-escaped item encoding ('...|item|...').
        """
        if self.type in ('string', 'text'):
            return Query(self, ' LIKE ', '%%%s%%' % value)
        elif self.type.startswith('list:'):
            return Query(self, ' LIKE ',
                         Expression("'%%|%s|%%'" \
                                     % bar_escape(value).replace("'", "''")))
        else:
            # bugfix: message used to read 'contains user with incopatible...'
            raise RuntimeError("contains used with incompatible field type")

    def with_alias(self, value):
        """Return a copy of this expression rendered as '<sql> AS value'."""
        return Expression(str(self) + ' AS %s' % value,
                          self.type, self._db, value)

    # arithmetic, for use in both Query and sortby

    def __add__(self, other):
        return Expression('(%s+%s)' % (self, sql_represent(other,
                          self.type, self._db._dbname, self._db._db_codec)), self.type,
                          self._db)

    def __sub__(self, other):
        # integer-integer stays integer; temporal/double differences are double
        if self.type == 'integer':
            result_type = 'integer'
        elif self.type in ['date', 'time', 'datetime', 'double']:
            result_type = 'double'
        else:
            raise SyntaxError("subtraction operation with incompatible field type")
        return Expression('(%s-%s)' % (self, sql_represent(other,
                          self.type, self._db._dbname, self._db._db_codec)),
                          result_type,
                          self._db)

    def __mul__(self, other):
        return Expression('(%s*%s)' % (self, sql_represent(other,
                          self.type, self._db._dbname, self._db._db_codec)), self.type,
                          self._db)

    def __div__(self, other):
        return Expression('(%s/%s)' % (self, sql_represent(other,
                          self.type, self._db._dbname, self._db._db_codec)), self.type,
                          self._db)

    # Python 3 (and py2 with future division) spells __div__ as __truediv__
    __truediv__ = __div__

    def len(self):
        """SQL LENGTH() of this expression, typed as integer."""
        return Expression('LENGTH(%s)' % self, 'integer', self._db)

    def __nonzero__(self):
        # always truthy, despite __eq__ being overloaded to return a Query
        return True

    # Python 3 name of __nonzero__
    __bool__ = __nonzero__
2705
class SQLCustomType:
    """
    allows defining of custom SQL types

    Example::

        decimal = SQLCustomType(
            type ='double',
            native ='integer',
            encoder =(lambda x: int(float(x) * 100)),
            decoder = (lambda x: Decimal("0.00") + Decimal(str(float(x)/100)) )
            )

        db.define_table(
            'example',
            Field('value', type=decimal)
            )

    :param type: the web2py type (default = 'string')
    :param native: the backend type
    :param encoder: how to encode the value to store it in the backend
    :param decoder: how to decode the value retrieved from the backend
    :param validator: what validators to use ( default = None, will use the
        default validator for type)
    """

    def __init__(
        self,
        type='string',
        native=None,
        encoder=None,
        decoder=None,
        validator=None,
        _class=None,
        ):
        identity = lambda x: x
        self.type = type
        self.native = native
        # default to pass-through when no codec functions are supplied
        self.encoder = encoder or identity
        self.decoder = decoder or identity
        self.validator = validator
        self._class = _class or type

    def startswith(self, dummy=None):
        # custom types never satisfy the string-prefix checks used elsewhere
        return False

    def __getslice__(self, a=0, b=100):
        # slicing a custom type (as done on type strings) yields nothing
        return None

    def __getitem__(self, i):
        # ditto for indexing
        return None

    def __str__(self):
        return self._class
2760
2761 -class Field(Expression):
2762 2763 """ 2764 an instance of this class represents a database field 2765 2766 example:: 2767 2768 a = Field(name, 'string', length=32, default=None, required=False, 2769 requires=IS_NOT_EMPTY(), ondelete='CASCADE', 2770 notnull=False, unique=False, 2771 uploadfield=True, widget=None, label=None, comment=None, 2772 uploadfield=True, # True means store on disk, 2773 # 'a_field_name' means store in this field in db 2774 # False means file content will be discarded. 2775 writable=True, readable=True, update=None, authorize=None, 2776 autodelete=False, represent=None, uploadfolder=None, 2777 uploadseparate=False # upload to separate directories by uuid_keys 2778 # first 2 character and tablename.fieldname 2779 # False - old behaviour 2780 # True - put uploaded file in 2781 # <uploaddir>/<tablename>.<fieldname>/uuid_key[:2]/ 2782 # directory) 2783 2784 to be used as argument of SQLDB.define_table 2785 2786 allowed field types: 2787 string, boolean, integer, double, text, blob, 2788 date, time, datetime, upload, password 2789 2790 strings must have a length or 512 by default. 2791 fields should have a default or they will be required in SQLFORMs 2792 the requires argument is used to validate the field input in SQLFORMs 2793 2794 """ 2795
def __init__(
    self,
    fieldname,
    type='string',
    length=None,
    default=DEFAULT,
    required=False,
    requires=DEFAULT,
    ondelete='CASCADE',
    notnull=False,
    unique=False,
    uploadfield=True,
    widget=None,
    label=None,
    comment=None,
    writable=True,
    readable=True,
    update=None,
    authorize=None,
    autodelete=False,
    represent=None,
    uploadfolder=None,
    uploadseparate=False,
    compute=None,
    ):
    """
    Initialize a Field to be passed to define_table.

    The fieldname is sanitized by cleanup(); names clashing with Table
    attributes or starting with '_' are rejected.  Passing a Table as
    `type` creates a reference to that table.  `default` falls back to
    `update` when left at the DEFAULT sentinel; `requires` left at
    DEFAULT is later replaced by the standard validators for the type.

    :raises SyntaxError: when the field name is invalid.
    """

    self.name = fieldname = cleanup(fieldname)
    # forbid names that would shadow Table attributes or look private
    if hasattr(Table, fieldname) or fieldname[0] == '_':
        raise SyntaxError, 'Field: invalid field name: %s' % fieldname
    if isinstance(type, Table):
        type = 'reference ' + type._tablename
    if length == None:
        length = 512
    self.type = type  # 'string', 'integer'
    self.length = length  # the length of the string
    if default == DEFAULT:
        # no explicit default: reuse the update value (or None)
        self.default = update or None
    else:
        self.default = default
    self.required = required  # is this field required
    self.ondelete = ondelete.upper()  # this is for reference fields only
    self.notnull = notnull
    self.unique = unique
    self.uploadfield = uploadfield
    self.uploadfolder = uploadfolder
    self.uploadseparate = uploadseparate
    self.widget = widget
    self.label = label
    self.comment = comment
    self.writable = writable
    self.readable = readable
    self.update = update
    self.authorize = authorize
    self.autodelete = autodelete
    if not represent and type in ('list:integer', 'list:string'):
        # default display for list types: comma-separated items
        represent = lambda x: ', '.join(str(y) for y in x or [])
    self.represent = represent
    self.compute = compute
    # NOTE(review): isattachment is unconditionally True here; retrieve()
    # refuses downloads when it is False — confirm who ever clears it.
    self.isattachment = True
    if self.label == None:
        # derive a human label from the field name: 'first_name' -> 'First Name'
        self.label = ' '.join([x.capitalize() for x in
                              fieldname.split('_')])
    if requires is None:
        self.requires = []
    else:
        # may still be the DEFAULT sentinel; resolved in Table.__init__
        self.requires = requires
2862
2863 - def store(self, file, filename=None, path=None):
2864 if hasattr(self,'custom_store'): 2865 return self.custom_store(file,filename,path) 2866 if not filename: 2867 filename = file.name 2868 filename = os.path.basename(filename.replace('/', os.sep)\ 2869 .replace('\\', os.sep)) 2870 m = re.compile('\.(?P<e>\w{1,5})$').search(filename) 2871 extension = m and m.group('e') or 'txt' 2872 uuid_key = web2py_uuid().replace('-', '')[-16:] 2873 encoded_filename = base64.b16encode(filename).lower() 2874 newfilename = '%s.%s.%s.%s' % \ 2875 (self._tablename, self.name, uuid_key, encoded_filename) 2876 newfilename = newfilename[:200] + '.' + extension 2877 if isinstance(self.uploadfield,Field): 2878 blob_uploadfield_name = self.uploadfield.uploadfield 2879 keys={self.uploadfield.name: newfilename, 2880 blob_uploadfield_name: file.read()} 2881 self.uploadfield._table.insert(**keys) 2882 elif self.uploadfield == True: 2883 if path: 2884 pass 2885 elif self.uploadfolder: 2886 path = self.uploadfolder 2887 else: 2888 path = os.path.join(self._db._folder, '..', 'uploads') 2889 if self.uploadseparate: 2890 path = os.path.join(path,"%s.%s" % (self._tablename, self.name),uuid_key[:2]) 2891 if not os.path.exists(path): 2892 os.makedirs(path) 2893 pathfilename = os.path.join(path, newfilename) 2894 dest_file = open(pathfilename, 'wb') 2895 shutil.copyfileobj(file, dest_file) 2896 dest_file.close() 2897 return newfilename
2898
    def retrieve(self, name, path=None):
        """
        Return (original_filename, open stream) for a stored upload `name`.

        The original filename is recovered from the base16-encoded section
        of the stored name; when that fails, `name` itself is used.

        :raises HTTP: 404 when the backing row is missing, 403 when
            self.authorize denies access
        """
        if hasattr(self,'custom_retrieve'):
            return self.custom_retrieve(name, path)
        if self.authorize or isinstance(self.uploadfield, str):
            row = self._db(self == name).select().first()
            if not row:
                raise HTTP(404)
            if self.authorize and not self.authorize(row):
                raise HTTP(403)
        try:
            m = regex_content.match(name)
            if not m or not self.isattachment:
                raise TypeError, 'Can\'t retrieve %s' % name
            filename = base64.b16decode(m.group('name'), True)
            filename = regex_cleanup_fn.sub('_', filename)
        except (TypeError, AttributeError):
            # name does not follow the encoded pattern: use it verbatim
            filename = name
        if isinstance(self.uploadfield, str): # ## if file is in DB
            return (filename, cStringIO.StringIO(row[self.uploadfield]))
        elif isinstance(self.uploadfield,Field):
            # file content lives in another table's blob field
            blob_uploadfield_name = self.uploadfield.uploadfield
            query = self.uploadfield == name
            data = self.uploadfield._table(query)[blob_uploadfield_name]
            return (filename, cStringIO.StringIO(data))
        else:
            # ## if file is on filesystem
            if path:
                pass
            elif self.uploadfolder:
                path = self.uploadfolder
            else:
                path = os.path.join(self._db._folder, '..', 'uploads')
            if self.uploadseparate:
                # NOTE(review): if `name` did not match regex_content above,
                # m is None here and these group() calls raise
                # AttributeError -- confirm uploadseparate names always
                # match the encoded pattern
                t = m.group('table')
                f = m.group('field')
                u = m.group('uuidkey')
                path = os.path.join(path,"%s.%s" % (t,f),u[:2])
            return (filename, open(os.path.join(path, name), 'rb'))
2937
2938 - def formatter(self, value):
2939 if value is None or not self.requires: 2940 return value 2941 if not isinstance(self.requires, (list, tuple)): 2942 requires = [self.requires] 2943 elif isinstance(self.requires, tuple): 2944 requires = list(self.requires) 2945 else: 2946 requires = copy.copy(self.requires) 2947 requires.reverse() 2948 for item in requires: 2949 if hasattr(item, 'formatter'): 2950 value = item.formatter(value) 2951 return value
2952
2953 - def validate(self, value):
2954 if not self.requires: 2955 return (value, None) 2956 requires = self.requires 2957 if not isinstance(requires, (list, tuple)): 2958 requires = [requires] 2959 for validator in requires: 2960 (value, error) = validator(value) 2961 if error: 2962 return (value, error) 2963 return (value, None)
2964
2965 - def lower(self):
2966 s = self._db._translator['lower'] % dict(field=str(self)) 2967 return Expression(s, 'string', self._db)
2968
2969 - def upper(self):
2970 s = self._db._translator['upper'] % dict(field=str(self)) 2971 return Expression(s, 'string', self._db)
2972
2973 - def year(self):
2974 s = self._db._translator['extract'] % dict(name='year', 2975 field=str(self)) 2976 return Expression(s, 'integer', self._db)
2977
2978 - def month(self):
2979 s = self._db._translator['extract'] % dict(name='month', 2980 field=str(self)) 2981 return Expression(s, 'integer', self._db)
2982
2983 - def day(self):
2984 s = self._db._translator['extract'] % dict(name='day', 2985 field=str(self)) 2986 return Expression(s, 'integer', self._db)
2987
2988 - def hour(self):
2989 s = self._db._translator['extract'] % dict(name='hour', 2990 field=str(self)) 2991 return Expression(s, 'integer', self._db)
2992
2993 - def minutes(self):
2994 s = self._db._translator['extract'] % dict(name='minute', 2995 field=str(self)) 2996 return Expression(s, 'integer', self._db)
2997
2998 - def seconds(self):
2999 s = self._db._translator['extract'] % dict(name='second', 3000 field=str(self)) 3001 return Expression(s, 'integer', self._db)
3002
3003 - def count(self):
3004 return Expression('COUNT(%s)' % str(self), 'integer', self._db)
3005
3006 - def sum(self):
3007 return Expression('SUM(%s)' % str(self), self.type, self._db)
3008
3009 - def max(self):
3010 return Expression('MAX(%s)' % str(self), self.type, self._db)
3011
3012 - def min(self):
3013 return Expression('MIN(%s)' % str(self), self.type, self._db)
3014
    def __getslice__(self, start, stop):
        """
        Translate field[start:stop] into an SQL SUBSTRING Expression.
        Negative indices are resolved at SQL level against the field
        length; stop == sys.maxint means "to the end" (py2 slicing hook).
        """
        if start < 0:
            pos0 = '(%s - %d)' % (self.len(), abs(start) - 1)
        else:
            # SQL substrings are 1-based
            pos0 = start + 1

        if stop < 0:
            length = '(%s - %d - %s)' % (self.len(), abs(stop) - 1, pos0)
        elif stop == sys.maxint:
            length = self.len()
        else:
            length = '(%s - %s)' % (stop + 1, pos0)

        d = dict(field=str(self), pos=pos0, length=length)
        s = self._db._translator['substring'] % d
        return Expression(s, 'string', self._db)
3031
    def __getitem__(self, i):
        """Single-character access: field[i] is the substring field[i:i+1]."""
        return self[i:i + 1]
3034
3035 - def __str__(self):
3036 try: 3037 return '%s.%s' % (self._tablename, self.name) 3038 except: 3039 return '<no table>.%s' % self.name
3040 3041 3042 SQLDB.Field = Field # necessary in gluon/globals.py session.connect 3043 SQLDB.Table = Table # necessary in gluon/globals.py session.connect 3044 3045
class Query(object):

    """
    a query object necessary to define a set.
    it can be stored or can be passed to SQLDB.__call__() to obtain a Set

    Example::

        query = db.users.name=='Max'
        set = db(query)
        records = set.select()

    :raises SyntaxError: when the query cannot be recognized
    """

    def __init__(
        self,
        left,
        op=None,
        right=None,
        ):
        """
        Build the SQL condition string for (left op right).

        - Query(sql_string): wrap an already-built SQL string verbatim.
        - right is None: only '=' / '<>' allowed (IS NULL / IS NOT NULL).
        - op ' IN ': right is either an SQL string (trailing char
          stripped) or an iterable of literals, each serialized with
          sql_represent().
        - otherwise: right is a Field/Expression (used verbatim) or a
          literal serialized with sql_represent().
        """
        if op is None and right is None:
            self.sql = left
        elif right is None:
            if op == '=':
                self.sql = '%s %s' % (left,
                        left._db._translator['is null'])
            elif op == '<>':
                self.sql = '%s %s' % (left,
                        left._db._translator['is not null'])
            else:
                raise SyntaxError('Operation %s can\'t be used with None' % op)
        elif op == ' IN ':
            if isinstance(right, str):
                self.sql = '%s%s(%s)' % (left, op, right[:-1])
            elif hasattr(right, '__iter__'):
                # fix: sql_represent takes the database *name* string, not
                # the db instance (matches the literal branch below and the
                # call in Set._update)
                r = ','.join([sql_represent(i, left.type, left._db._dbname, left._db._db_codec)
                             for i in right])
                self.sql = '%s%s(%s)' % (left, op, r)
            else:
                raise SyntaxError('Right argument of "IN" is not suitable')
        elif isinstance(right, (Field, Expression)):
            self.sql = '%s%s%s' % (left, op, right)
        else:
            right = sql_represent(right, left.type, left._db._dbname, left._db._db_codec)
            self.sql = '%s%s%s' % (left, op, right)

    def __and__(self, other):
        """query1 & query2 -> Query('(q1 AND q2)')"""
        return Query('(%s AND %s)' % (self, other))

    def __or__(self, other):
        """query1 | query2 -> Query('(q1 OR q2)')"""
        return Query('(%s OR %s)' % (self, other))

    def __invert__(self):
        """~query -> Query('(NOT q)')"""
        return Query('(NOT %s)' % self)

    def __str__(self):
        """The raw SQL condition text."""
        return self.sql
3104 3105 3106 regex_tables = re.compile('(?P<table>[a-zA-Z]\w*)\.') 3107 regex_quotes = re.compile("'[^']*'") 3108 3109
def parse_tablenames(text):
    """
    Return the distinct table names referenced as <table>.<field> in
    `text`.  Single-quoted string literals are blanked out first, and
    every ``IN (SELECT ...)`` subquery is cut away so tables appearing
    only inside subselects are not reported.
    """
    text = re.sub("'[^']*'", '', text)
    # drop nested "IN (SELECT ...)" fragments, tracking paren depth
    while True:
        start = text.find('IN (SELECT ')
        if start == -1:
            break
        depth = 1
        pos = start + 11
        limit = len(text)
        while depth and pos < limit:
            ch = text[pos]
            if ch == '(':
                depth += 1
            elif ch == ')':
                depth -= 1
            pos += 1
        text = text[:start] + text[pos + 1:]
    found = {}
    for name in re.findall(r'([a-zA-Z]\w*)\.', text):
        found[name] = True
    return found.keys()
3130 3131
def xorify(orderby):
    """
    Fold a sequence of orderby expressions into a single one by chaining
    them with '|'.  Returns None when the sequence is empty or None.
    """
    if not orderby:
        return None
    combined = orderby[0]
    for entry in orderby[1:]:
        combined = combined | entry
    return combined
3139 3140
class Set(object):

    """
    a Set represents a set of records in the database;
    the records are identified by the where=Query(...) object.
    normally the Set is generated by SQLDB.__call__(Query(...))

    given a set, for example::

        set = db(db.users.name=='Max')

    you can::

        set.update(db.users.name='Massimo')
        set.delete()  # all elements in the set
        set.select(orderby=db.users.id, groupby=db.users.name, limitby=(0,10))

    and take subsets::

        subset = set(db.users.id<5)
    """
    def __init__(self, db, where=''):
        """
        :param db: the SQLDB instance the set belongs to
        :param where: a Query (or SQL condition string); stringified
            into self.sql_w
        """
        self._db = db
        self._tables = []

        # find out which tables are involved

        self.sql_w = str(where or '')
        self._tables = parse_tablenames(self.sql_w)
3166 3167
    def __call__(self, where):
        """
        Return a subset: the current condition ANDed with `where`.
        A Table argument means 'all of its records' (table._id > 0) and
        a Field argument means 'field is set' (field != None).
        """
        if isinstance(where,Table):
            where = where._id>0
        elif isinstance(where,Field):
            where = where!=None
        if self.sql_w:
            return Set(self._db, Query(self.sql_w) & where)
        else:
            return Set(self._db, where)
3177
    def _select(self, *fields, **attributes):
        """
        Build (but do not execute) the SELECT statement for this set and
        the given fields/attributes, and set self.colnames as a side
        effect.  Handles distinct, left joins, groupby/having, orderby
        and per-backend limitby SQL (oracle, mssql, firebird, db2,
        ingres, informix; LIMIT/OFFSET otherwise).

        :raises SyntaxError: on unknown attributes or when no table can
            be determined
        """
        valid_attributes = [
            'orderby',
            'groupby',
            'limitby',
            'required',
            'cache',
            'default',
            'requires',
            'left',
            'distinct',
            'having',
            ]
        invalid_keys = [key for key in attributes if not key in valid_attributes]
        if invalid_keys:
            raise SyntaxError, 'invalid select attributes: %s' % invalid_keys

        # ## if not fields specified take them all from the requested tables

        if not fields:
            fields = [self._db[table].ALL for table in self._tables]
        sql_f = ', '.join([str(f) for f in fields])
        tablenames = parse_tablenames(self.sql_w + ' ' + sql_f)
        if len(tablenames) < 1:
            raise SyntaxError, 'Set: no tables selected'
        w2p_tablenames = [ t for t in tablenames if isinstance(self._db[t],Table) ]
        self.colnames = [c.strip() for c in sql_f.split(', ')]
        if self.sql_w:
            sql_w = ' WHERE ' + self.sql_w
        else:
            sql_w = ''
        sql_o = ''
        sql_s = 'SELECT'
        distinct = attributes.get('distinct', False)
        if distinct is True:
            sql_s += ' DISTINCT'
        elif distinct:
            # e.g. distinct='table.field' -> DISTINCT ON (postgres syntax)
            sql_s += ' DISTINCT ON (%s)' % distinct
        if attributes.get('left', False):
            join = attributes['left']
            command = self._db._translator['left join']
            if not isinstance(join, (tuple, list)):
                join = [join]
            # joint: plain tables to LEFT JOIN; joinon: explicit SQLJoins
            joint = [t._tablename for t in join if not isinstance(t,
                     SQLJoin)]
            joinon = [t for t in join if isinstance(t, SQLJoin)]
            #patch join+left patch (solves problem with ordering in left joins)
            tables_to_merge={}
            [tables_to_merge.update(dict.fromkeys(parse_tablenames(str(t.query)))) for t
             in joinon]
            joinont = [t.table._tablename for t in joinon]
            [tables_to_merge.pop(t) for t in joinont if t in
             tables_to_merge]
            important_tablenames = joint + joinont + tables_to_merge.keys()
            excluded = [t for t in tablenames if not t in
                        important_tablenames ]
            sql_t = ', '.join([ t for t in
                               excluded + tables_to_merge.keys()])
            if joint:
                sql_t += ' %s %s' % (command, ','.join([t for t in joint]))
            #/patch join+left patch
            for t in joinon:
                sql_t += ' %s %s' % (command, str(t))
        else:
            sql_t = ', '.join(tablenames)
        if attributes.get('groupby', False):
            sql_o += ' GROUP BY %s' % attributes['groupby']
        if attributes.get('having', False):
            sql_o += ' HAVING %s' % attributes['having']
        orderby = attributes.get('orderby', False)
        if orderby:
            if isinstance(orderby, (list, tuple)):
                orderby = xorify(orderby)
            if str(orderby) == '<random>':
                sql_o += ' ORDER BY %s' % self._db._translator['random']
            else:
                sql_o += ' ORDER BY %s' % orderby
        if attributes.get('limitby', False):
            # oracle does not support limitby
            (lmin, lmax) = attributes['limitby']
            if self._db._dbname in ['oracle']:
                # emulate limitby with nested ROWNUM selects; a stable
                # ORDER BY is forced when the caller gave none
                if not attributes.get('orderby', None) and w2p_tablenames:
                    sql_o += ' ORDER BY %s' % ', '.join(
                        ['%s.%s'%(t,self._db[t]._id.name) for t in w2p_tablenames])
                if len(sql_w) > 1:
                    sql_w_row = sql_w + ' AND w_row > %i' % lmin
                else:
                    sql_w_row = 'WHERE w_row > %i' % lmin
                return '%s %s FROM (SELECT w_tmp.*, ROWNUM w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNUM<=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
            elif self._db._dbname == 'mssql' or \
                self._db._dbname == 'mssql2':
                # TOP cannot skip rows; Set.select() drops the first lmin
                if not attributes.get('orderby', None) and w2p_tablenames:
                    sql_o += ' ORDER BY %s' % ', '.join(['%s.%s'%(t,x) for x in ((hasattr(self._db[t],'_primarykey') and self._db[t]._primarykey) or [self._db[t]._id.name]) for t in w2p_tablenames])
                sql_s += ' TOP %i' % lmax
            elif self._db._dbname == 'firebird':
                if not attributes.get('orderby', None) and w2p_tablenames:
                    sql_o += ' ORDER BY %s' % ', '.join(
                        ['%s.%s'%(t,self._db[t]._id.name) for t in w2p_tablenames])
                sql_s += ' FIRST %i SKIP %i' % (lmax - lmin, lmin)
            elif self._db._dbname == 'db2':
                # NOTE(review): this uses self._db[t].name where the mssql
                # branch uses self._db[t]._id.name -- confirm intended
                if not attributes.get('orderby', None) and w2p_tablenames:
                    sql_o += ' ORDER BY %s' % ', '.join(['%s.%s'%(t,x) for x in ((hasattr(self._db[t],'_primarykey') and self._db[t]._primarykey) or [self._db[t].name]) for t in w2p_tablenames])
                sql_o += ' FETCH FIRST %i ROWS ONLY' % lmax
            elif self._db._dbname == 'ingres':
                fetch_amt = lmax - lmin
                if fetch_amt:
                    sql_s += ' FIRST %d ' % (fetch_amt, )
                if lmin:
                    # Requires Ingres 9.2+
                    sql_o += ' OFFSET %d' % (lmin, )
            elif self._db._dbname == 'informix':
                fetch_amt = lmax - lmin
                dbms_version = int(self._db._connection.dbms_version.split('.')[0])
                if lmin and (dbms_version >= 10):
                    # Requires Informix 10.0+
                    sql_s += ' SKIP %d' % (lmin, )
                if fetch_amt and (dbms_version >= 9):
                    # Requires Informix 9.0+
                    sql_s += ' FIRST %d' % (fetch_amt, )
            else:
                sql_o += ' LIMIT %i OFFSET %i' % (lmax - lmin, lmin)
        return '%s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w,
                                       sql_o)
3302
    def select(self, *fields, **attributes):
        """
        Always returns a Rows object, even if it may be empty.

        Supports a cache=(cache_model, time_expire) attribute; cached
        results are keyed by the db uri plus the generated SQL.
        """

        db=self._db
        def response(query):
            # execute and fetch all rows, remembering the last SQL run
            db['_lastsql'] = query
            db._execute(query)
            return db._cursor.fetchall()

        if not attributes.get('cache', None):
            query = self._select(*fields, **attributes)
            rows = response(query)
        else:
            (cache_model, time_expire) = attributes['cache']
            del attributes['cache']
            query = self._select(*fields, **attributes)
            key = self._db._uri + '/' + query
            rows = cache_model(key, lambda : response(query), time_expire)

        if isinstance(rows,tuple):
            rows = list(rows)
        if db._dbname in ['mssql', 'mssql2', 'db2']:
            # these backends fetch from row 0 (TOP / FETCH FIRST cannot
            # skip): drop the first limitby[0] rows here
            rows = rows[(attributes.get('limitby', None) or (0,))[0]:]
        return self.parse(db, rows, self.colnames, SetClass=Set)
    @staticmethod
    def parse(db, rows, colnames, blob_decode=True, SetClass=None):
        """
        Convert raw DBAPI `rows` into a Rows object of Row records, keyed
        first by table name then field name, coercing each value to a
        Python type according to the field's declared type.

        Columns that are not '<table>.<field>' go into row['_extra']
        (and an 'AS alias', when present, is also set as an attribute).
        'id' fields get update_record/delete_record helpers and, when
        SetClass is given, one Set per referencing table.  Virtual
        fields of the involved tables are applied at the end.
        """
        virtualtables = []
        new_rows = []
        for (i,row) in enumerate(rows):
            new_row = Row()
            for j,colname in enumerate(colnames):
                value = row[j]
                if not table_field.match(colnames[j]):
                    # expression/alias column: keep it under '_extra'
                    if not '_extra' in new_row:
                        new_row['_extra'] = Row()
                    new_row['_extra'][colnames[j]] = value
                    select_as_parser = re.compile("\s+AS\s+(\S+)")
                    new_column_name = select_as_parser.search(colnames[j])
                    if not new_column_name is None:
                        column_name = new_column_name.groups(0)
                        setattr(new_row,column_name[0],value)
                    continue
                (tablename, fieldname) = colname.split('.')
                table = db[tablename]
                field = table[fieldname]
                field_type = field.type
                # decode non-blob byte strings with the db codec, then
                # re-encode unicode as utf-8
                if field.type != 'blob' and isinstance(value, str):
                    try:
                        value = value.decode(db._db_codec)
                    except Exception:
                        pass
                if isinstance(value, unicode):
                    value = value.encode('utf-8')
                if not tablename in new_row:
                    colset = new_row[tablename] = Row()
                    # NOTE(review): appended once per row per table, so
                    # setvirtualfields below runs once per row -- confirm
                    # intended
                    virtualtables.append((tablename,db[tablename].virtualfields))
                else:
                    colset = new_row[tablename]

                if isinstance(field_type, SQLCustomType):
                    colset[fieldname] = field_type.decoder(value)
                    # field_type = field_type.type
                elif not isinstance(field_type, str) or value==None:
                    colset[fieldname] = value
                elif isinstance(field_type, str) and \
                        field_type.startswith('reference'):
                    referee = field_type[10:].strip()
                    if not '.' in referee:
                        # reference by id: wrap in a lazy Reference
                        colset[fieldname] = rid = Reference(value)
                        (rid._table, rid._record) = (db[referee], None)
                    else: ### reference not by id
                        colset[fieldname] = value
                elif field_type == 'boolean':
                    if value == True or value == 'T' or value == 't':
                        colset[fieldname] = True
                    else:
                        colset[fieldname] = False
                elif field_type == 'date' \
                        and (not isinstance(value, datetime.date)\
                        or isinstance(value, datetime.datetime)):
                    (y, m, d) = [int(x) for x in
                                 str(value)[:10].strip().split('-')]
                    colset[fieldname] = datetime.date(y, m, d)
                elif field_type == 'time' \
                        and not isinstance(value, datetime.time):
                    time_items = [int(x) for x in
                                  str(value)[:8].strip().split(':')[:3]]
                    if len(time_items) == 3:
                        (h, mi, s) = time_items
                    else:
                        (h, mi, s) = time_items + [0]
                    colset[fieldname] = datetime.time(h, mi, s)
                elif field_type == 'datetime'\
                        and not isinstance(value, datetime.datetime):
                    (y, m, d) = [int(x) for x in
                                 str(value)[:10].strip().split('-')]
                    time_items = [int(x) for x in
                                  str(value)[11:19].strip().split(':')[:3]]
                    if len(time_items) == 3:
                        (h, mi, s) = time_items
                    else:
                        (h, mi, s) = time_items + [0]
                    colset[fieldname] = datetime.datetime(y, m, d, h, mi, s)
                elif field_type == 'blob' and blob_decode:
                    colset[fieldname] = base64.b64decode(str(value))
                elif field_type.startswith('decimal'):
                    decimals = [int(x) for x in field_type[8:-1].split(',')][-1]
                    if field._db._dbname == 'sqlite':
                        # sqlite returns floats: fix the number of decimals
                        value = ('%.' + str(decimals) + 'f') % value
                    if not isinstance(value, decimal.Decimal):
                        value = decimal.Decimal(str(value))
                    colset[fieldname] = value
                elif field_type.startswith('list:integer'):
                    if db._uri != 'gae':
                        colset[fieldname] = bar_decode_integer(value)
                    else:
                        colset[fieldname] = value
                elif field_type.startswith('list:reference'):
                    if db._uri != 'gae':
                        colset[fieldname] = bar_decode_integer(value)
                    else:
                        colset[fieldname] = value
                elif field_type.startswith('list:string'):
                    if db._uri != 'gae':
                        colset[fieldname] = bar_decode_string(value)
                    else:
                        colset[fieldname] = value
                else:
                    colset[fieldname] = value
                if field_type == 'id':
                    # attach record helpers bound to this id via lambda
                    # default arguments
                    id = colset[field.name]
                    colset.update_record = lambda _ = (colset, table, id), **a: update_record(_, a)
                    colset.delete_record = lambda t = table, i = id: t._db(t._id==i).delete()
                    if SetClass:
                        for (referee_table, referee_name) in \
                                table._referenced_by:
                            s = db[referee_table][referee_name]
                            colset[referee_table] = SetClass(db, s == id)
                    colset['id'] = id
            new_rows.append(new_row)
        rowsobj = Rows(db, new_rows, colnames, rawrows=rows)
        for table, virtualfields in virtualtables:
            for item in virtualfields:
                try:
                    rowsobj = rowsobj.setvirtualfields(**{table:item})
                except KeyError:
                    # to avoid breaking virtualfields when partial select
                    pass
        return rowsobj
3455
    def _count(self):
        """Return the SQL text that counts the records in this set."""
        return self._select('count(*)')
3458
    def count(self):
        """Execute count(*) for this set and return the integer result."""
        return self.select('count(*)')[0]._extra['count(*)']
3461
3462 - def _delete(self):
3463 if len(self._tables) != 1: 3464 raise SyntaxError, \ 3465 'Set: unable to determine what to delete' 3466 tablename = self._tables[0] 3467 if self.sql_w: 3468 sql_w = ' WHERE ' + self.sql_w 3469 else: 3470 sql_w = '' 3471 return 'DELETE FROM %s%s;' % (tablename, sql_w)
3472
    def delete(self):
        """
        Execute the DELETE for this set and return the number of deleted
        rows (None when the driver exposes no rowcount).  Autodelete
        upload files are removed first; on SQLite, ON DELETE CASCADE for
        web2py reference fields is emulated manually.
        """
        query = self._delete()
        self.delete_uploaded_files()
        ### special code to handle CASCADE in SQLite
        db=self._db
        t = self._tables[0]
        if db._dbname=='sqlite' and db[t]._referenced_by:
            # remember the ids about to disappear, before deleting
            table = db[t]
            deleted = [x[table._id.name] for x in self.select(table._id)]
        ### end special code to handle CASCADE in SQLite
        self._db['_lastsql'] = query
        self._db._execute(query)
        try:
            counter = self._db._cursor.rowcount
        except:
            # NOTE(review): bare except kept as-is; some drivers raise on
            # rowcount access
            counter = None
        ### special code to handle CASCADE in SQLite
        if db._dbname=='sqlite' and counter:
            for tablename,fieldname in db[t]._referenced_by:
                f = db[tablename][fieldname]
                if f.type == 'reference ' + t and f.ondelete == 'CASCADE':
                    db(db[tablename][fieldname].belongs(deleted)).delete()
        ### end special code to handle CASCADE in SQLite
        return counter
3497
    def _update(self, **update_fields):
        """
        Build (but do not execute) the UPDATE statement for this set;
        only single-table sets can be updated.  Fields with an `update`
        default or a `compute` rule that were not explicitly assigned
        are filled in automatically.

        :raises SyntaxError: when the set spans more than one table
        """
        tablenames = self._tables
        if len(tablenames) != 1:
            raise SyntaxError, 'Set: unable to determine what to do'
        sql_t = tablenames[0]
        (table, dbname) = (self._db[sql_t], self._db._dbname)
        # apply per-field `update` defaults to unassigned fields
        update_fields.update(dict([(fieldname, table[fieldname].update) \
                              for fieldname in table.fields \
                              if not fieldname in update_fields \
                              and table[fieldname].update != None]))
        # then evaluate `compute` rules against the pending values
        update_fields.update(dict([(fieldname, table[fieldname].compute(Row(update_fields))) \
                              for fieldname in table.fields \
                              if not fieldname in update_fields \
                              and table[fieldname].compute != None]))
        sql_v = 'SET ' + ', '.join(['%s=%s' % (field,
                                   sql_represent(value,
                                   table[field].type, dbname, self._db._db_codec))
                                   for (field, value) in
                                   update_fields.items()])
        if self.sql_w:
            sql_w = ' WHERE ' + self.sql_w
        else:
            sql_w = ''
        return 'UPDATE %s %s%s;' % (sql_t, sql_v, sql_w)
3522
3523 - def update(self, **update_fields):
3524 query = self._update(**update_fields) 3525 self.delete_uploaded_files(update_fields) 3526 self._db['_lastsql'] = query 3527 self._db._execute(query) 3528 try: 3529 return self._db._cursor.rowcount 3530 except: 3531 return None
3532
    def delete_uploaded_files(self, upload_fields=None):
        """
        Unlink filesystem files held by autodelete 'upload' fields of the
        records in this set.  When `upload_fields` (the pending update
        values) is given, only the fields being replaced are considered,
        and a file is kept when the new value equals the old one.
        """
        table = self._db[self._tables[0]]

        # ## mind uploadfield==True means file is not in DB

        if upload_fields:
            fields = upload_fields.keys()
        else:
            fields = table.fields
        fields = [f for f in fields if table[f].type == 'upload'
                   and table[f].uploadfield == True
                   and table[f].autodelete]
        if not fields:
            return
        for record in self.select(*[table[f] for f in fields]):
            for fieldname in fields:
                field = table[fieldname]
                oldname = record.get(fieldname, None)
                if not oldname:
                    continue
                if upload_fields and oldname == upload_fields[fieldname]:
                    continue
                uploadfolder = field.uploadfolder
                if not uploadfolder:
                    uploadfolder = os.path.join(self._db._folder, '..', 'uploads')
                if field.uploadseparate:
                    # stored under <table>.<field>/<uuid_key[:2]>/
                    items = oldname.split('.')
                    uploadfolder = os.path.join(uploadfolder,
                                                "%s.%s" % (items[0], items[1]),
                                                items[2][:2])
                oldpath = os.path.join(uploadfolder, oldname)
                if os.path.exists(oldpath):
                    os.unlink(oldpath)
3566
def update_record(pack, a=None):
    """
    Helper bound to row.update_record by Set.parse.

    :param pack: (colset, table, id) triple identifying the record
    :param a: dict of changes; when None or empty, the current colset
        values are written back

    Only keys that are actual table fields (and not 'id') are written to
    the database; the in-memory colset is then refreshed with the values
    that were saved.
    """
    (colset, table, id) = pack
    # fix: the default used to be a shared mutable dict ({}); `a or ...`
    # keeps the historical behavior where an empty dict also falls back
    # to the colset snapshot
    b = a or dict(colset)
    c = dict([(k, v) for (k, v) in b.items()
              if k in table.fields and not k == 'id'])
    table._db(table._id == id).update(**c)
    for (k, v) in c.items():
        colset[k] = v
3575
class Rows(object):

    """
    A wrapper for the return value of a select.  It basically represents
    a table: it has an iterator and each row is represented as a
    dictionary (a Row keyed by table name, unwrapped to the table's Row
    when `compact` applies -- see __getitem__).
    """

    # ## TODO: this class still needs some work to care for ID/OID
3585 - def __init__( 3586 self, 3587 db=None, 3588 records=[], 3589 colnames=[], 3590 compact=True, 3591 rawrows=None, 3592 ):
3593 self.db = db 3594 self.records = records 3595 self.colnames = colnames 3596 self.compact = compact 3597 self.response = rawrows
3598
    def setvirtualfields(self,**keyed_virtualfields):
        """
        Attach computed (virtual) fields: for each tablename=instance
        keyword, every public callable attribute of the instance whose
        underlying function takes at least one argument is evaluated per
        row and stored under row[tablename][attribute].  The instance's
        __dict__ is updated with the row first, so the methods can read
        the current values.  Returns self.
        """
        if not keyed_virtualfields:
            return self
        for row in self.records:
            for (tablename,virtualfields) in keyed_virtualfields.items():
                attributes = dir(virtualfields)
                virtualfields.__dict__.update(row)
                if not tablename in row:
                    box = row[tablename] = Row()
                else:
                    box = row[tablename]
                for attribute in attributes:
                    if attribute[0] != '_':
                        method = getattr(virtualfields,attribute)
                        # py2 introspection (im_func/func_code): only call
                        # bound methods taking at least `self`
                        if callable(method) and method.im_func.func_code.co_argcount:
                            box[attribute]=method()
        return self
3616
3617 - def __and__(self, other):
3618 if self.colnames!=other.colnames: raise Exception, 'Cannot & incompatible Rows objects' 3619 records = self.records+other.records 3620 return Rows(self.db,records,self.colnames)
3621
3622 - def __or__(self, other):
3623 if self.colnames!=other.colnames: raise Exception, 'Cannot | incompatible Rows objects' 3624 records = self.records 3625 records += [record for record in other.records \ 3626 if not record in records] 3627 return Rows(self.db,records,self.colnames)
3628
3629 - def first(self):
3630 if not self.records: 3631 return None 3632 return self[0]
3633
3634 - def last(self):
3635 if not self.records: 3636 return None 3637 return self[-1]
3638
    def __nonzero__(self):
        """Truth value (py2 hook): 1 when at least one record exists, else 0."""
        if len(self.records):
            return 1
        return 0
3643
    def __len__(self):
        """Number of records in the result set."""
        return len(self.records)
3646
3647 - def __getslice__(self, a, b):
3648 return Rows(self.db,self.records[a:b],self.colnames)
3649
3650 - def find(self, f):
3651 """ 3652 returns a new Rows object, a subset of the original object, filtered by the function f 3653 """ 3654 if not self.records: 3655 return Rows(self.db, [], self.colnames) 3656 records = [] 3657 for i in range(0,len(self)): 3658 row = self[i] 3659 if f(row): 3660 records.append(self.records[i]) 3661 return Rows(self.db, records, self.colnames)
3662
    def exclude(self, f):
        """
        removes elements from the calling Rows object, filtered by the
        function f, and returns a new Rows object containing the removed
        elements
        """
        if not self.records:
            return Rows(self.db, [], self.colnames)
        removed = []
        i=0
        # explicit index loop: records are deleted in place while scanning,
        # so the index only advances when nothing was removed
        while i<len(self):
            row = self[i]
            if f(row):
                removed.append(self.records[i])
                del self.records[i]
            else:
                i += 1
        return Rows(self.db, removed, self.colnames)
3680
3681 - def sort(self, f, reverse=False):
3682 """ 3683 returns a list of sorted elements (not sorted in place) 3684 """ 3685 return Rows(self.db,sorted(self,key=f,reverse=reverse),self.colnames)
3686
    def __getitem__(self, i):
        """
        Return record i.  In compact mode, a row holding a single table
        (and no '_extra' column) is unwrapped to that table's Row.
        """
        row = self.records[i]
        keys = row.keys()
        if self.compact and len(keys) == 1 and keys[0] != '_extra':
            return row[row.keys()[0]]
        return row
3693
3694 - def as_list(self, 3695 compact=True, 3696 storage_to_dict=True, 3697 datetime_to_str=True):
3698 """ 3699 returns the data as a list or dictionary. 3700 :param storage_to_dict: when True returns a dict, otherwise a list(default True) 3701 :param datetime_to_str: convert datetime fields as strings (default True) 3702 """ 3703 (oc, self.compact) = (self.compact, compact) 3704 if storage_to_dict: 3705 items = [item.as_dict(datetime_to_str) for item in self] 3706 else: 3707 items = [item for item in self] 3708 self.compact = compact 3709 return items
3710 3711
    def as_dict(self,
                key='id',
                compact=True,
                storage_to_dict=True,
                datetime_to_str=True):
        """
        returns the data as a dictionary of dictionaries (storage_to_dict=True) or records (False)

        :param key: the name of the field to be used as dict key, normally
            the id; may also be 'table.field' or a callable computing the
            key from each row
        :param compact: ? (default True)
        :param storage_to_dict: when True returns a dict, otherwise a list(default True)
        :param datetime_to_str: convert datetime fields as strings (default True)
        """
        rows = self.as_list(compact, storage_to_dict, datetime_to_str)
        if isinstance(key,str) and key.count('.')==1:
            (table, field) = key.split('.')
            return dict([(r[table][field],r) for r in rows])
        elif isinstance(key,str):
            return dict([(r[key],r) for r in rows])
        else:
            return dict([(key(r),r) for r in rows])
3733
    def __iter__(self):
        """
        iterator over records; each item goes through __getitem__, so
        compact unwrapping applies
        """

        for i in xrange(len(self)):
            yield self[i]
3741 3742
    def export_to_csv_file(self, ofile, null='<NULL>', *args, **kwargs):
        """
        export data to csv, the first line contains the column names

        :param ofile: where the csv must be exported to
        :param null: how null values must be represented (default '<NULL>')
        :param delimiter: delimiter to separate values (default ',')
        :param quotechar: character to use to quote string values (default '"')
        :param quoting: quote system, use csv.QUOTE_*** (default csv.QUOTE_MINIMAL)
        :param represent: use the fields .represent value (default False)
        :param colnames: list of column names to use (default self.colnames)
            This will only work when exporting rows objects!!!!
            DO NOT use this with db.export_to_csv()
        """
        delimiter = kwargs.get('delimiter', ',')
        quotechar = kwargs.get('quotechar', '"')
        quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
        represent = kwargs.get('represent', False)
        writer = csv.writer(ofile, delimiter=delimiter,
                            quotechar=quotechar, quoting=quoting)
        colnames = kwargs.get('colnames', self.colnames)
        # a proper csv starting with the column names
        writer.writerow(colnames)

        def none_exception(value):
            """
            returns a cleaned up value that can be used for csv export:
            - unicode text is encoded as such
            - None values are replaced with the given representation (default <NULL>)
            """
            if value == None:
                return null
            elif isinstance(value, unicode):
                return value.encode('utf8')
            elif isinstance(value, Reference):
                return int(value)
            elif hasattr(value, 'isoformat'):
                # dates/times: 'YYYY-MM-DD HH:MM:SS'
                return value.isoformat()[:19].replace('T', ' ')
            elif isinstance(value, (list,tuple)): # for type='list:..'
                return bar_encode(value)
            return value

        # NOTE(review): the data loop iterates self.colnames, not the
        # `colnames` kwarg above -- a custom colnames only changes the
        # header row; confirm intended
        for record in self:
            row = []
            for col in self.colnames:
                if not table_field.match(col):
                    row.append(record._extra[col])
                else:
                    (t, f) = col.split('.')
                    field = self.db[t][f]
                    if isinstance(record.get(t, None), (Row,dict)):
                        value = record[t][f]
                    else:
                        value = record[f]
                    if represent and field.represent:
                        value = field.represent(value)
                    row.append(none_exception(value))
            writer.writerow(row)
3801
    def __str__(self):
        """
        serializes the rows to a CSV string (see export_to_csv_file)
        """

        s = cStringIO.StringIO()
        self.export_to_csv_file(s)
        return s.getvalue()
3810
    def xml(self):
        """
        serializes the table using sqlhtml.SQLTABLE (if present)
        """

        # local import: sqlhtml is only needed here
        import sqlhtml
        return sqlhtml.SQLTABLE(self).xml()
3818
3819 - def json(self, mode='object'):
3820 """ 3821 serializes the table to a JSON list of objects 3822 """ 3823 3824 mode = mode.lower() 3825 if not mode in ['object', 'array']: 3826 raise SyntaxError, 'Invalid JSON serialization mode: %s' % mode 3827 3828 def inner_loop(record, col): 3829 (t, f) = col.split('.') 3830 res = None 3831 if not table_field.match(col): 3832 res = record._extra[col] 3833 else: 3834 if isinstance(record.get(t, None), Row): 3835 res = record[t][f] 3836 else: 3837 res = record[f] 3838 if mode == 'object': 3839 return (f, res) 3840 else: 3841 return res
3842 3843 if mode == 'object': 3844 items = [dict([inner_loop(record, col) for col in 3845 self.colnames]) for record in self] 3846 else: 3847 items = [[inner_loop(record, col) for col in self.colnames] 3848 for record in self] 3849 return json(items) 3850 3851
def Rows_unpickler(data):
    """Unpickle helper: rebuild the plain payload produced by Rows_pickler."""
    # NOTE(review): cPickle.loads can execute arbitrary code during
    # deserialization -- this must never be fed untrusted data.
    return cPickle.loads(data)
def Rows_pickler(data):
    """
    Pickle helper: reduce a Rows object to (Rows_unpickler, (payload,)),
    where payload is a pickled plain list of dicts (Storage objects
    flattened, datetime objects kept as-is).
    """
    plain = data.as_list(storage_to_dict=True, datetime_to_str=False)
    return (Rows_unpickler, (cPickle.dumps(plain),))
# Register the reduce/reconstruct pair above so Rows instances can be
# pickled: they are flattened to a plain list of dicts first.
copy_reg.pickle(Rows, Rows_pickler, Rows_unpickler)
def test_all():
    """

    Create a table with all possible field types
    'sqlite://test.db'
    'mysql://root:none@localhost/test'
    'postgres://mdipierro:none@localhost/test'
    'mssql://web2py:none@A64X2/web2py_test'
    'oracle://username:password@database'
    'firebird://user:password@server:3050/database'
    'db2://DSN=dsn;UID=user;PWD=pass'
    'firebird_embedded://username:password@c://path'
    'informix://user:password@server:3050/database'
    'gae' # for google app engine

    >>> if len(sys.argv)<2: db = SQLDB(\"sqlite://test.db\")
    >>> if len(sys.argv)>1: db = SQLDB(sys.argv[1])
    >>> tmp = db.define_table('users',\
        Field('stringf', 'string', length=32, required=True),\
        Field('booleanf', 'boolean', default=False),\
        Field('passwordf', 'password', notnull=True),\
        Field('blobf', 'blob'),\
        Field('uploadf', 'upload'),\
        Field('integerf', 'integer', unique=True),\
        Field('doublef', 'double', unique=True,notnull=True),\
        Field('datef', 'date', default=datetime.date.today()),\
        Field('timef', 'time'),\
        Field('datetimef', 'datetime'),\
        migrate='test_user.table')

    Insert a field

    >>> db.users.insert(stringf='a', booleanf=True, passwordf='p', blobf='0A',\
        uploadf=None, integerf=5, doublef=3.14,\
        datef=datetime.date(2001, 1, 1),\
        timef=datetime.time(12, 30, 15),\
        datetimef=datetime.datetime(2002, 2, 2, 12, 30, 15))
    1

    Drop the table

    >>> db.users.drop()

    Examples of insert, select, update, delete

    >>> tmp = db.define_table('person',\
        Field('name'),\
        Field('birth','date'),\
        migrate='test_person.table')
    >>> person_id = db.person.insert(name=\"Marco\",birth='2005-06-22')
    >>> person_id = db.person.insert(name=\"Massimo\",birth='1971-12-21')
    >>> len(db().select(db.person.ALL))
    2
    >>> me = db(db.person.id==person_id).select()[0] # test select
    >>> me.name
    'Massimo'
    >>> db(db.person.name=='Massimo').update(name='massimo') # test update
    1
    >>> db(db.person.name=='Marco').delete() # test delete
    1

    Update a single record

    >>> me.update_record(name=\"Max\")
    >>> me.name
    'Max'

    Examples of complex search conditions

    >>> len(db((db.person.name=='Max')&(db.person.birth<'2003-01-01')).select())
    1
    >>> len(db((db.person.name=='Max')&(db.person.birth<datetime.date(2003,01,01))).select())
    1
    >>> len(db((db.person.name=='Max')|(db.person.birth<'2003-01-01')).select())
    1
    >>> me = db(db.person.id==person_id).select(db.person.name)[0]
    >>> me.name
    'Max'

    Examples of search conditions using extract from date/datetime/time

    >>> len(db(db.person.birth.month()==12).select())
    1
    >>> len(db(db.person.birth.year()>1900).select())
    1

    Example of usage of NULL

    >>> len(db(db.person.birth==None).select()) ### test NULL
    0
    >>> len(db(db.person.birth!=None).select()) ### test NULL
    1

    Examples of search conditions using lower, upper, and like

    >>> len(db(db.person.name.upper()=='MAX').select())
    1
    >>> len(db(db.person.name.like('%ax')).select())
    1
    >>> len(db(db.person.name.upper().like('%AX')).select())
    1
    >>> len(db(~db.person.name.upper().like('%AX')).select())
    0

    orderby, groupby and limitby

    >>> people = db().select(db.person.name, orderby=db.person.name)
    >>> order = db.person.name|~db.person.birth
    >>> people = db().select(db.person.name, orderby=order)

    >>> people = db().select(db.person.name, orderby=db.person.name, groupby=db.person.name)

    >>> people = db().select(db.person.name, orderby=order, limitby=(0,100))

    Example of one 2 many relation

    >>> tmp = db.define_table('dog',\
        Field('name'),\
        Field('birth','date'),\
        Field('owner',db.person),\
        migrate='test_dog.table')
    >>> db.dog.insert(name='Snoopy', birth=None, owner=person_id)
    1

    A simple JOIN

    >>> len(db(db.dog.owner==db.person.id).select())
    1

    >>> len(db().select(db.person.ALL, db.dog.name,left=db.dog.on(db.dog.owner==db.person.id)))
    1

    Drop tables

    >>> db.dog.drop()
    >>> db.person.drop()

    Example of many 2 many relation and Set

    >>> tmp = db.define_table('author', Field('name'),\
        migrate='test_author.table')
    >>> tmp = db.define_table('paper', Field('title'),\
        migrate='test_paper.table')
    >>> tmp = db.define_table('authorship',\
        Field('author_id', db.author),\
        Field('paper_id', db.paper),\
        migrate='test_authorship.table')
    >>> aid = db.author.insert(name='Massimo')
    >>> pid = db.paper.insert(title='QCD')
    >>> tmp = db.authorship.insert(author_id=aid, paper_id=pid)

    Define a Set

    >>> authored_papers = db((db.author.id==db.authorship.author_id)&(db.paper.id==db.authorship.paper_id))
    >>> rows = authored_papers.select(db.author.name, db.paper.title)
    >>> for row in rows: print row.author.name, row.paper.title
    Massimo QCD

    Example of search condition using belongs

    >>> set = (1, 2, 3)
    >>> rows = db(db.paper.id.belongs(set)).select(db.paper.ALL)
    >>> print rows[0].title
    QCD

    Example of search condition using nested select

    >>> nested_select = db()._select(db.authorship.paper_id)
    >>> rows = db(db.paper.id.belongs(nested_select)).select(db.paper.ALL)
    >>> print rows[0].title
    QCD

    Example of expressions

    >>> mynumber = db.define_table('mynumber', Field('x', 'integer'))
    >>> db(mynumber.id>0).delete()
    0
    >>> for i in range(10): tmp = mynumber.insert(x=i)
    >>> db(mynumber.id>0).select(mynumber.x.sum())[0]._extra[mynumber.x.sum()]
    45
    >>> db(mynumber.x+2==5).select(mynumber.x + 2)[0]._extra[mynumber.x + 2]
    5

    Output in csv

    >>> print str(authored_papers.select(db.author.name, db.paper.title)).strip()
    author.name,paper.title\r
    Massimo,QCD

    Delete all leftover tables

    # >>> SQLDB.distributed_transaction_commit(db)

    >>> db.mynumber.drop()
    >>> db.authorship.drop()
    >>> db.author.drop()
    >>> db.paper.drop()
    """
# Backward-compatibility aliases: older web2py code used the SQL*-prefixed
# names for these classes; keep both spellings pointing at the same objects.
SQLField = Field
SQLTable = Table
SQLXorable = Expression
SQLQuery = Query
SQLSet = Set
SQLRows = Rows
SQLStorage = Row
BaseAdapter = SQLDB
4069 -def DAL(uri='sqlite:memory:', 4070 pool_size=0, 4071 folder=None, 4072 db_codec='UTF-8', 4073 check_reserved=None, 4074 migrate=True, 4075 fake_migrate=False, 4076 decode_credentials=False):
4077 if uri == 'gae': 4078 import contrib.gql 4079 return contrib.gql.GQLDB() 4080 else: 4081 uris = isinstance(uri,(list,tuple)) and uri or [uri] 4082 for k in range(5): 4083 for uri in uris: 4084 try: 4085 return SQLDB(uri, pool_size=pool_size, folder=folder, 4086 db_codec=db_codec, check_reserved=check_reserved, 4087 migrate=migrate, fake_migrate=fake_migrate, 4088 decode_credentials=decode_credentials) 4089 except SyntaxError, exception: 4090 raise SyntaxError, exception 4091 except Exception, exception: 4092 if uri==uris[-1]: 4093 time.sleep(1) 4094 raise RuntimeError, "%s (tried 5 times)" % exception
if __name__ == '__main__':
    # Run the doctests embedded in this module (notably test_all above).
    import doctest
    doctest.testmod()