[merge] backport stable fixes into default

author: Aurelien Campeas <aurelien.campeas@logilab.fr>
changeset: 611663348158
branch: default
phase: public
hidden: no
parent revisions: #784d6f300070 [reledit] repair some brokennes, #bcdf22734059 Abstract the support for ORDER BY and LIMIT/OFFSET SQL generation
child revision: #ef29d3ea3909 backport stable
files modified by this revision:
.hgignore
.hgtags
__pkginfo__.py
appobject.py
cwconfig.py
dbapi.py
debian/control
devtools/__init__.py
devtools/cwwindmill.py
devtools/httptest.py
devtools/test/unittest_httptest.py
devtools/testlib.py
doc/book/en/admin/setup.rst
doc/book/en/annexes/rql/debugging.rst
doc/book/en/annexes/rql/language.rst
doc/book/en/devrepo/testing.rst
doc/book/en/tutorials/advanced/part03_bfss.rst
entities/test/unittest_wfobjs.py
hooks/test/unittest_syncschema.py
migration.py
misc/migration/3.10.9_Any.py
rtags.py
schema.py
server/__init__.py
server/hook.py
server/mssteps.py
server/querier.py
server/repository.py
server/session.py
server/sources/rql2sql.py
server/sqlutils.py
server/test/unittest_ldapuser.py
server/test/unittest_migractions.py
server/test/unittest_msplanner.py
server/test/unittest_multisources.py
server/test/unittest_querier.py
server/test/unittest_rql2sql.py
server/test/unittest_security.py
test/unittest_migration.py
vregistry.py
web/application.py
web/data/cubicweb.css
web/data/cubicweb.old.css
web/data/uiprops.py
web/propertysheet.py
web/views/actions.py
web/views/basecomponents.py
web/views/basetemplates.py
web/views/ibreadcrumbs.py
wsgi/handler.py
# HG changeset patch
# User Aurelien Campeas <aurelien.campeas@logilab.fr>
# Date 1300976473 -3600
# Thu Mar 24 15:21:13 2011 +0100
# Node ID 61166334815826a4a7cf8ab7b2a3b7cfd7a2c0c8
# Parent 784d6f3000704f436059efb7348f38b720016ed5
# Parent bcdf227340591fb0cad7b3137efdbd0d89ceca91
[merge] backport stable fixes into default

diff --git a/.hgignore b/.hgignore
@@ -7,5 +7,11 @@
1  \.old$
2  \~$
3  \#.*?\#$
4  \.swp$
5  ^doc/book/en/apidoc$
6 +\.old$
7 +syntax: regexp
8 +.*/data/database/.*\.sqlite
9 +.*/data/database/.*\.config
10 +.*/data/database/tmpdb.*
11 +
diff --git a/.hgtags b/.hgtags
@@ -179,9 +179,10 @@
12  1484257fe9aeb29d0210e635c12ae5b3d6118cfb cubicweb-debian-version-3.10.6-1
13  1959d97ebf2e6a0f7cd05d4cc48bb955c4351da5 cubicweb-version-3.10.7
14  bf5d9a1415e3c9abe6b68ba3b24a8ad741f9de3c cubicweb-debian-version-3.10.7-1
15  e581a86a68f089946a98c966ebca7aee58a5718f cubicweb-version-3.10.8
16  132b525de25bc75ed6389c45aee77e847cb3a437 cubicweb-debian-version-3.10.8-1
17 +48f468f33704e401a8e7907e258bf1ac61eb8407 cubicweb-version-3.9.x
18  37432cede4fe55b97fc2e9be0a2dd20e8837a848 cubicweb-version-3.11.0
19  8daabda9f571863e8754f8ab722744c417ba3abf cubicweb-debian-version-3.11.0-1
20  d0410eb4d8bbf657d7f32b0c681db09b1f8119a0 cubicweb-version-3.11.1
21  77318f1ec4aae3523d455e884daf3708c3c79af7 cubicweb-debian-version-3.11.1-1
diff --git a/__pkginfo__.py b/__pkginfo__.py
@@ -50,11 +50,11 @@
22      'simplejson': '>= 2.0.9',
23      'lxml': '',
24      'Twisted': '',
25      # XXX graphviz
26      # server dependencies
27 -    'logilab-database': '>= 1.3.3',
28 +    'logilab-database': '>= 1.4.0',
29      'pysqlite': '>= 2.5.5', # XXX install pysqlite2
30      }
31 
32  __recommends__ = {
33      'Pyro': '>= 3.9.1, < 4.0.0',
diff --git a/appobject.py b/appobject.py
@@ -574,6 +574,10 @@
34 
35      @deprecated('[3.6] use self.cw_propval')
36      def propval(self, propid):
37          return self._cw.property_value(self._cwpropkey(propid))
38 
39 +    # these are overridden by set_log_methods below
40 +    # only defining here to prevent pylint from complaining
41 +    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
42 +
43  set_log_methods(AppObject, getLogger('cubicweb.appobject'))
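
The same stub-then-rebind pattern is applied to cwconfig.py and dbapi.py below. A minimal self-contained sketch of the idea (the ``set_log_methods`` defined here is a simplified stand-in for the logilab helper, not its actual implementation):

.. sourcecode:: python

    import logging

    def set_log_methods(cls, logger):
        # stand-in for the logilab.common helper: bind the logger's methods
        # onto the class so instances can call self.warning(...), self.info(...)
        for name in ('debug', 'info', 'warning', 'error', 'critical', 'exception'):
            setattr(cls, name, staticmethod(getattr(logger, name)))

    class MyAppObject(object):
        # placeholders so pylint sees the attributes; they are overwritten
        # by set_log_methods below, exactly as in the hunk above
        info = warning = error = critical = exception = debug = \
            lambda msg, *a, **kw: None

    set_log_methods(MyAppObject, logging.getLogger('example.appobject'))
    MyAppObject().warning('logging now goes through %s', 'the bound logger')
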
diff --git a/cwconfig.py b/cwconfig.py
@@ -1180,10 +1180,17 @@
44              smtp.close()
45          finally:
46              SMTP_LOCK.release()
47          return True
48 
49 +    # these are overridden by set_log_methods below
50 +    # only defining here to prevent pylint from complaining
51 +    @classmethod
52 +    def debug(cls, msg, *a, **kw):
53 +        pass
54 +    info = warning = error = critical = exception = debug 
55 +
56  set_log_methods(CubicWebNoAppConfiguration,
57                  logging.getLogger('cubicweb.configuration'))
58 
59  # alias to get a configuration instance from an instance id
60  instance_configuration = CubicWebConfiguration.config_for
diff --git a/dbapi.py b/dbapi.py
@@ -373,10 +373,13 @@
61 
62      @deprecated('[3.8] use direct access to req.session.data dictionary')
63      def del_session_data(self, key):
64          self.session.data.pop(key, None)
65 
66 +    # these are overridden by set_log_methods below
67 +    # only defining here to prevent pylint from complaining
68 +    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
69 
70  set_log_methods(DBAPIRequest, getLogger('cubicweb.dbapi'))
71 
72 
73  # exceptions ##################################################################
diff --git a/debian/control b/debian/control
@@ -31,11 +31,11 @@
74  Architecture: all
75  XB-Python-Version: ${python:Versions}
76  Conflicts: cubicweb-multisources
77  Replaces: cubicweb-multisources
78  Provides: cubicweb-multisources
79 -Depends: ${misc:Depends}, ${python:Depends}, cubicweb-common (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-logilab-database (>= 1.3.3), cubicweb-postgresql-support | cubicweb-mysql-support | python-pysqlite2
80 +Depends: ${misc:Depends}, ${python:Depends}, cubicweb-common (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-logilab-database (>= 1.4.0), cubicweb-postgresql-support | cubicweb-mysql-support | python-pysqlite2
81  Recommends: pyro (<< 4.0.0), cubicweb-documentation (= ${source:Version})
82  Description: server part of the CubicWeb framework
83   CubicWeb is a semantic web application framework.
84   .
85   This package provides the repository server part of the system.
diff --git a/devtools/__init__.py b/devtools/__init__.py
@@ -15,21 +15,30 @@
86  #
87  # You should have received a copy of the GNU Lesser General Public License along
88  # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
89  """Test tools for cubicweb"""
90 
91 +from __future__ import with_statement
92 +
93  __docformat__ = "restructuredtext en"
94 
95  import os
96  import sys
97  import logging
98 +import shutil
99 +import pickle
100 +import glob
101 +import warnings
102  from datetime import timedelta
103  from os.path import (abspath, join, exists, basename, dirname, normpath, split,
104 -                     isfile, isabs, splitext)
105 +                     isfile, isabs, splitext, isdir, expanduser)
106 +from functools import partial
107 +import hashlib
108 
109  from logilab.common.date import strptime
110 -from cubicweb import CW_SOFTWARE_ROOT, ConfigurationError, schema, cwconfig
111 +from logilab.common.decorators import cached, clear_cache
112 +from cubicweb import CW_SOFTWARE_ROOT, ConfigurationError, schema, cwconfig, BadConnectionId
113  from cubicweb.server.serverconfig import ServerConfiguration
114  from cubicweb.etwist.twconfig import TwistedConfiguration
115 
116  cwconfig.CubicWebConfiguration.cls_adjust_sys_path()
117 
@@ -76,24 +85,53 @@
118                     'admin' : {'login': u'admin',
119                                'password': u'gingkow',
120                                },
121                     }
122 
123 +def turn_repo_off(repo):
124 +    """ Idea: this is less costly than a full re-creation of the repo object.
125 +    off:
126 +    * sessions are closed,
127 +    * pools are closed,
128 +    * the system source is shut down
129 +    """
130 +    if not repo._needs_refresh:
131 +        for sessionid in list(repo._sessions):
132 +            warnings.warn('%s Open session found while turning repository off'
133 +                          %sessionid, RuntimeWarning)
134 +            try:
135 +                repo.close(sessionid)
136 +            except BadConnectionId: #this is strange ? thread issue ?
137 +                print 'XXX unknown session', sessionid
138 +        for pool in repo.pools:
139 +            pool.close(True)
140 +        repo.system_source.shutdown()
141 +        repo._needs_refresh = True
142 +        repo._has_started = False
143 +
144 +def turn_repo_on(repo):
145 +    """Idea: this is less costly than a full re-creation of the repo object.
146 +    on:
147 +    * pools are connected
148 +    * caches are cleared
149 +    """
150 +    if repo._needs_refresh:
151 +        for pool in repo.pools:
152 +            pool.reconnect()
153 +        repo._type_source_cache = {}
154 +        repo._extid_cache = {}
155 +        repo.querier._rql_cache = {}
156 +        for source in repo.sources:
157 +            source.reset_caches()
158 +        repo._needs_refresh = False
159 +
160 
161  class TestServerConfiguration(ServerConfiguration):
162      mode = 'test'
163      set_language = False
164      read_instance_schema = False
165      init_repository = True
166 -    db_require_setup = True
167 -    options = cwconfig.merge_options(
168 -        ServerConfiguration.options +
169 -        tuple((opt, optdict) for opt, optdict in TwistedConfiguration.options
170 -              if opt in ('anonymous-user', 'anonymous-password')))
171 -    # By default anonymous login are allow but some test need to deny of to
172 -    # change the default user. Set it to None to prevent anonymous login.
173 -    anonymous_credential = ('anon', 'anon')
174 
175      def __init__(self, appid='data', apphome=None, log_threshold=logging.CRITICAL+10):
176          # must be set before calling parent __init__
177          if apphome is None:
178              if exists(appid):
@@ -104,23 +142,32 @@
179          ServerConfiguration.__init__(self, appid)
180          self.init_log(log_threshold, force=True)
181          # need this, usually triggered by cubicweb-ctl
182          self.load_cwctl_plugins()
183 
184 -    anonymous_user = TwistedConfiguration.anonymous_user.im_func
185 +    # By default anonymous login are allow but some test need to deny of to
186 +    # change the default user. Set it to None to prevent anonymous login.
187 +    anonymous_credential = ('anon', 'anon')
188 +
189 +    def anonymous_user(self):
190 +        if not self.anonymous_credential:
191 +            return None, None
192 +        return self.anonymous_credential
193 +
194 +    def set_anonymous_allowed(self, allowed, anonuser='anon'):
195 +        if allowed:
196 +            self.anonymous_credential = (anonuser, anonuser)
197 +        else:
198 +            self.anonymous_credential = None
199 
200      @property
201      def apphome(self):
202          return self._apphome
203      appdatahome = apphome
204 
205      def load_configuration(self):
206          super(TestServerConfiguration, self).load_configuration()
207 -        if self.anonymous_credential:
208 -            user, password = self.anonymous_credential
209 -            self.global_set_option('anonymous-user', user)
210 -            self.global_set_option('anonymous-password', password)
211          # no undo support in tests
212          self.global_set_option('undo-support', '')
213 
214      def main_config_file(self):
215          """return instance's control configuration file"""
@@ -212,135 +259,406 @@
216            def test_something(self):
217                rset = self.execute('Any X WHERE X is CWUser')
218                self.view('foaf', rset)
219 
220      """
221 -    db_require_setup = False    # skip init_db / reset_db steps
222      read_instance_schema = True # read schema from database
223 
224 
225  # test database handling #######################################################
226 
227 -def init_test_database(config=None, appid='data', apphome=None):
228 -    """init a test database for a specific driver"""
229 -    from cubicweb.dbapi import in_memory_repo_cnx
230 -    config = config or TestServerConfiguration(appid, apphome=apphome)
231 -    sources = config.sources()
232 -    driver = sources['system']['db-driver']
233 -    if config.db_require_setup:
234 -        if driver == 'sqlite':
235 -            init_test_database_sqlite(config)
236 -        elif driver == 'postgres':
237 -            init_test_database_postgres(config)
238 +DEFAULT_EMPTY_DB_ID = '__default_empty_db__'
239 +
240 +class TestDataBaseHandler(object):
241 +    DRIVER = None
242 +    db_cache = {}
243 +    explored_glob = set()
244 +
245 +    def __init__(self, config):
246 +        self.config = config
247 +        self._repo = None
248 +        # pure consistency check
249 +        assert self.system_source['db-driver'] == self.DRIVER
250 +
251 +    def _ensure_test_backup_db_dir(self):
252 +        """Return path of directory for database backup.
253 +
254 +        The function creates it if necessary."""
255 +        backupdir = join(self.config.apphome, 'database')
256 +        if not isdir(backupdir):
257 +            os.makedirs(backupdir)
258 +        return backupdir
259 +
260 +    def config_path(self, db_id):
261 +        """Path for config backup of a given database id"""
262 +        return self.absolute_backup_file(db_id, 'config')
263 +
264 +    def absolute_backup_file(self, db_id, suffix):
265 +        """Path for config backup of a given database id"""
266 +        dbname = self.dbname.replace('-', '_')
267 +        assert '.' not in db_id
268 +        filename = '%s-%s.%s' % (dbname, db_id, suffix)
269 +        return join(self._ensure_test_backup_db_dir(), filename)
270 +
271 +    def db_cache_key(self, db_id, dbname=None):
272 +        """Build a database cache key for a db_id with the current config
273 +
274 +        This key is meant to be used in the cls.db_cache mapping"""
275 +        if dbname is None:
276 +            dbname = self.dbname
277 +        dbname = os.path.basename(dbname)
278 +        dbname = dbname.replace('-', '_')
279 +        return (self.config.apphome, dbname, db_id)
280 +
281 +    def backup_database(self, db_id):
282 +        """Store the content of the current database as <db_id>
283 +
284 +        The config used are also stored."""
285 +        backup_data = self._backup_database(db_id)
286 +        config_path = self.config_path(db_id)
287 +        # XXX we dump a dict of the config
288 +        # This is experimental, to help config-dependent setups (like BFSS)
289 +        # be properly restored
290 +        with open(config_path, 'wb') as conf_file:
291 +            conf_file.write(pickle.dumps(dict(self.config)))
292 +        self.db_cache[self.db_cache_key(db_id)] = (backup_data, config_path)
293 +
294 +    def _backup_database(self, db_id):
295 +        """Actual backup the current database.
296 +
297 +        return a value to be stored in db_cache to allow restoration"""
298 +        raise NotImplementedError()
299 +
300 +    def restore_database(self, db_id):
301 +        """Restore a database.
302 +
303 +        takes as argument value stored in db_cache by self._backup_database"""
304 +        # XXX set a clearer error message ???
305 +        backup_coordinates, config_path = self.db_cache[self.db_cache_key(db_id)]
306 +        # reload the config used to create the database.
307 +        config = pickle.loads(open(config_path, 'rb').read())
308 +        # shutdown repo before changing database content
309 +        if self._repo is not None:
310 +            self._repo.turn_repo_off()
311 +        self._restore_database(backup_coordinates, config)
312 +
313 +    def _restore_database(self, backup_coordinates, config):
314 +        """Actual restore of the current database.
315 +
316 +        Use the value stored in db_cache as input """
317 +        raise NotImplementedError()
318 +
319 +    def get_repo(self, startup=False):
320 +        """ return Repository object on the current database.
321 +
322 +        (turn the current repo object "on" if there is one or recreate one)
323 +        if startup is True, server startup hooks will be called if needed
324 +        """
325 +        if self._repo is None:
326 +            self._repo = self._new_repo(self.config)
327 +        repo = self._repo
328 +        repo.turn_repo_on()
329 +        if startup and not repo._has_started:
330 +            repo.hm.call_hooks('server_startup', repo=repo)
331 +            repo._has_started = True
332 +        return repo
333 +
334 +    def _new_repo(self, config):
335 +        """Factory method to create a new Repository Instance"""
336 +        from cubicweb.dbapi import in_memory_repo
337 +        config._cubes = None
338 +        repo = in_memory_repo(config)
339 +        # extending Repository class
340 +        repo._has_started = False
341 +        repo._needs_refresh = False
342 +        repo.turn_repo_on = partial(turn_repo_on, repo)
343 +        repo.turn_repo_off = partial(turn_repo_off, repo)
344 +        return repo
345 +
346 +
347 +    def get_cnx(self):
348 +        """return Connection object ont he current repository"""
349 +        from cubicweb.dbapi import in_memory_cnx
350 +        repo = self.get_repo()
351 +        sources = self.config.sources()
352 +        login  = unicode(sources['admin']['login'])
353 +        password = sources['admin']['password'] or 'xxx'
354 +        cnx = in_memory_cnx(repo, login, password=password)
355 +        return cnx
356 +
357 +    def get_repo_and_cnx(self, db_id=DEFAULT_EMPTY_DB_ID):
358 +        """Reset database with the current db_id and return (repo, cnx)
359 +
360 +        A database *MUST* have been built with the current <db_id> prior to
361 +        calling this method. See the ``build_db_cache`` method. The returned
362 +        repository has its startup hooks called and the connection is
363 +        established as admin."""
364 +
365 +        self.restore_database(db_id)
366 +        repo = self.get_repo(startup=True)
367 +        cnx  = self.get_cnx()
368 +        return repo, cnx
369 +
370 +    @property
371 +    def system_source(self):
372 +        sources = self.config.sources()
373 +        return sources['system']
374 +
375 +    @property
376 +    def dbname(self):
377 +        return self.system_source['db-name']
378 +
379 +    def init_test_database(self):
380 +        """actual initialisation of the database"""
381 +        raise ValueError('no initialization function for driver %r' % self.DRIVER)
382 +
383 +    def has_cache(self, db_id):
384 +        """Check if a given database id exist in cb cache for the current config"""
385 +        cache_glob = self.absolute_backup_file('*', '*')
386 +        if cache_glob not in self.explored_glob:
387 +            self.discover_cached_db()
388 +        return self.db_cache_key(db_id) in self.db_cache
389 +
390 +    def discover_cached_db(self):
391 +        """Search available db_if for the current config"""
392 +        cache_glob = self.absolute_backup_file('*', '*')
393 +        directory = os.path.dirname(cache_glob)
394 +        entries={}
395 +        candidates = glob.glob(cache_glob)
396 +        for filepath in candidates:
397 +            data = os.path.basename(filepath)
398 +            # database backup are in the forms are <dbname>-<db_id>.<backtype>
399 +            dbname, data = data.split('-', 1)
400 +            db_id, filetype = data.split('.', 1)
401 +            entries.setdefault((dbname, db_id), {})[filetype] = filepath
402 +        for (dbname, db_id), entry in entries.iteritems():
403 +            # apply necessary transformation from the driver
404 +            value = self.process_cache_entry(directory, dbname, db_id, entry)
405 +            assert 'config' in entry
406 +            if value is not None: # None value means "not handled by this driver
407 +                                  # XXX Ignored value are shadowed to other Handler if cache are common.
408 +                key = self.db_cache_key(db_id, dbname=dbname)
409 +                self.db_cache[key] = value, entry['config']
410 +        self.explored_glob.add(cache_glob)
411 +
412 +    def process_cache_entry(self, directory, dbname, db_id, entry):
413 +        """Transforms potential cache entry to proper backup coordinate
414 +
415 +        entry argument is a "filetype" -> "filepath" mapping
416 +        Return None if an entry should be ignored."""
417 +        return None
418 +
419 +    def build_db_cache(self, test_db_id=DEFAULT_EMPTY_DB_ID, pre_setup_func=None):
420 +        """Build Database cache for ``test_db_id`` if a cache doesn't exist
421 +
422 +        If ``test_db_id is DEFAULT_EMPTY_DB_ID``, self.init_test_database is
423 +        called. Otherwise, DEFAULT_EMPTY_DB_ID is built/restored and
424 +        ``pre_setup_func`` is called to set up the database.
425 +
426 +        This function backs up any database it builds."""
427 +
428 +        if self.has_cache(test_db_id):
429 +            return #test_db_id, 'already in cache'
430 +        if test_db_id is DEFAULT_EMPTY_DB_ID:
431 +            self.init_test_database()
432          else:
433 -            raise ValueError('no initialization function for driver %r' % driver)
434 -    config._cubes = None # avoid assertion error
435 -    repo, cnx = in_memory_repo_cnx(config, unicode(sources['admin']['login']),
436 -                              password=sources['admin']['password'] or 'xxx')
437 -    if driver == 'sqlite':
438 -        install_sqlite_patch(repo.querier)
439 -    return repo, cnx
440 -
441 -def reset_test_database(config):
442 -    """init a test database for a specific driver"""
443 -    if not config.db_require_setup:
444 -        return
445 -    driver = config.sources()['system']['db-driver']
446 -    if driver == 'sqlite':
447 -        reset_test_database_sqlite(config)
448 -    elif driver == 'postgres':
449 -        init_test_database_postgres(config)
450 -    else:
451 -        raise ValueError('no reset function for driver %r' % driver)
452 -
453 +            print 'Building %s for database %s' % (test_db_id, self.dbname)
454 +            self.build_db_cache(DEFAULT_EMPTY_DB_ID)
455 +            self.restore_database(DEFAULT_EMPTY_DB_ID)
456 +            repo = self.get_repo(startup=True)
457 +            cnx = self.get_cnx()
458 +            session = repo._sessions[cnx.sessionid]
459 +            session.set_pool()
460 +            _commit = session.commit
461 +            def always_pooled_commit():
462 +                _commit()
463 +                session.set_pool()
464 +            session.commit = always_pooled_commit
465 +            pre_setup_func(session, self.config)
466 +            session.commit()
467 +            cnx.close()
468 +        self.backup_database(test_db_id)
469 
470  ### postgres test database handling ############################################
471 
472 -def init_test_database_postgres(config):
473 -    """initialize a fresh postgresql databse used for testing purpose"""
474 -    from logilab.database import get_db_helper
475 -    from cubicweb.server import init_repository
476 -    from cubicweb.server.serverctl import (createdb, system_source_cnx,
477 -                                           _db_sys_cnx)
478 -    source = config.sources()['system']
479 -    dbname = source['db-name']
480 -    templdbname = dbname + '_template'
481 -    helper = get_db_helper('postgres')
482 -    # connect on the dbms system base to create our base
483 -    dbcnx = _db_sys_cnx(source, 'CREATE DATABASE and / or USER', verbose=0)
484 -    cursor = dbcnx.cursor()
485 -    try:
486 -        if dbname in helper.list_databases(cursor):
487 -            cursor.execute('DROP DATABASE %s' % dbname)
488 -        if not templdbname in helper.list_databases(cursor):
489 -            source['db-name'] = templdbname
490 -            createdb(helper, source, dbcnx, cursor)
491 -            dbcnx.commit()
492 -            cnx = system_source_cnx(source, special_privs='LANGUAGE C', verbose=0)
493 +class PostgresTestDataBaseHandler(TestDataBaseHandler):
494 +
495 +    # XXX
496 +    # XXX PostgresTestDataBaseHandler Have not been tested at all.
497 +    # XXX
498 +    DRIVER = 'postgres'
499 +
500 +    @property
501 +    @cached
502 +    def helper(self):
503 +        from logilab.database import get_db_helper
504 +        return get_db_helper('postgres')
505 +
506 +    @property
507 +    @cached
508 +    def dbcnx(self):
509 +        from cubicweb.server.serverctl import _db_sys_cnx
510 +        return  _db_sys_cnx(self.system_source, 'CREATE DATABASE and / or USER', verbose=0)
511 +
512 +    @property
513 +    @cached
514 +    def cursor(self):
515 +        return self.dbcnx.cursor()
516 +
517 +    def init_test_database(self):
518 +        """initialize a fresh postgresql databse used for testing purpose"""
519 +        from cubicweb.server import init_repository
520 +        from cubicweb.server.serverctl import system_source_cnx, createdb
521 +        # connect on the dbms system base to create our base
522 +        try:
523 +            self._drop(self.dbname)
524 +
525 +            createdb(self.helper, self.system_source, self.dbcnx, self.cursor)
526 +            self.dbcnx.commit()
527 +            cnx = system_source_cnx(self.system_source, special_privs='LANGUAGE C', verbose=0)
528              templcursor = cnx.cursor()
529 -            # XXX factorize with db-create code
530 -            helper.init_fti_extensions(templcursor)
531 -            # install plpythonu/plpgsql language if not installed by the cube
532 -            langs = sys.platform == 'win32' and ('plpgsql',) or ('plpythonu', 'plpgsql')
533 -            for extlang in langs:
534 -                helper.create_language(templcursor, extlang)
535 -            cnx.commit()
536 -            templcursor.close()
537 -            cnx.close()
538 -            init_repository(config, interactive=False)
539 -            source['db-name'] = dbname
540 -    except:
541 -        dbcnx.rollback()
542 -        # XXX drop template
543 -        raise
544 -    createdb(helper, source, dbcnx, cursor, template=templdbname)
545 -    dbcnx.commit()
546 -    dbcnx.close()
547 +            try:
548 +                # XXX factorize with db-create code
549 +                self.helper.init_fti_extensions(templcursor)
550 +                # install plpythonu/plpgsql language if not installed by the cube
551 +                langs = sys.platform == 'win32' and ('plpgsql',) or ('plpythonu', 'plpgsql')
552 +                for extlang in langs:
553 +                    self.helper.create_language(templcursor, extlang)
554 +                cnx.commit()
555 +            finally:
556 +                templcursor.close()
557 +                cnx.close()
558 +            init_repository(self.config, interactive=False)
559 +        except:
560 +            self.dbcnx.rollback()
561 +            print >> sys.stderr, 'building', self.dbname, 'failed'
562 +            #self._drop(self.dbname)
563 +            raise
564 +
565 +    def helper_clear_cache(self):
566 +        self.dbcnx.commit()
567 +        self.dbcnx.close()
568 +        clear_cache(self, 'dbcnx')
569 +        clear_cache(self, 'helper')
570 +        clear_cache(self, 'cursor')
571 +
572 +    def __del__(self):
573 +        self.helper_clear_cache()
574 +
575 +    @property
576 +    def _config_id(self):
577 +        return hashlib.sha1(self.config.apphome).hexdigest()[:10]
578 +
579 +    def _backup_name(self, db_id): # merge me with parent
580 +        backup_name = '_'.join(('cache', self._config_id, self.dbname, db_id))
581 +        return backup_name.lower()
582 +
583 +    def _drop(self, db_name):
584 +        if db_name in self.helper.list_databases(self.cursor):
585 +            #print 'dropping overwritted database:', db_name
586 +            self.cursor.execute('DROP DATABASE %s' % db_name)
587 +            self.dbcnx.commit()
588 +
589 +    def _backup_database(self, db_id):
590 +        """Actual backup the current database.
591 +
592 +        return a value to be stored in db_cache to allow restoration"""
593 +        from cubicweb.server.serverctl import createdb
594 +        orig_name = self.system_source['db-name']
595 +        try:
596 +            backup_name = self._backup_name(db_id)
597 +            #print 'storing postgres backup as', backup_name
598 +            self._drop(backup_name)
599 +            self.system_source['db-name'] = backup_name
600 +            createdb(self.helper, self.system_source, self.dbcnx, self.cursor, template=orig_name)
601 +            self.dbcnx.commit()
602 +            return backup_name
603 +        finally:
604 +            self.system_source['db-name'] = orig_name
605 +
606 +    def _restore_database(self, backup_coordinates, config):
607 +        from cubicweb.server.serverctl import createdb
608 +        """Actual restore of the current database.
609 +
610 +        Use the value stored in db_cache as input """
611 +        #print 'restoring postgrest backup from', backup_coordinates
612 +        self._drop(self.dbname)
613 +        createdb(self.helper, self.system_source, self.dbcnx, self.cursor,
614 +                 template=backup_coordinates)
615 +        self.dbcnx.commit()
616 +
617 +
618 
619  ### sqlserver2005 test database handling #######################################
620 
621 -def init_test_database_sqlserver2005(config):
622 -    """initialize a fresh sqlserver databse used for testing purpose"""
623 -    if config.init_repository:
624 -        from cubicweb.server import init_repository
625 -        init_repository(config, interactive=False, drop=True)
626 +class SQLServerTestDataBaseHandler(TestDataBaseHandler):
627 +    DRIVER = 'sqlserver'
628 +
629 +    # XXX complete me
630 +
631 +    def init_test_database(self):
632 +        """initialize a fresh sqlserver databse used for testing purpose"""
633 +        if self.config.init_repository:
634 +            from cubicweb.server import init_repository
635 +            init_repository(config, interactive=False, drop=True)
636 
637  ### sqlite test database handling ##############################################
638 
639 -def cleanup_sqlite(dbfile, removetemplate=False):
640 -    try:
641 -        os.remove(dbfile)
642 -        os.remove('%s-journal' % dbfile)
643 -    except OSError:
644 -        pass
645 -    if removetemplate:
646 +class SQLiteTestDataBaseHandler(TestDataBaseHandler):
647 +    DRIVER = 'sqlite'
648 +
649 +    @staticmethod
650 +    def _cleanup_database(dbfile):
651          try:
652 -            os.remove('%s-template' % dbfile)
653 +            os.remove(dbfile)
654 +            os.remove('%s-journal' % dbfile)
655          except OSError:
656              pass
657 
658 -def reset_test_database_sqlite(config):
659 -    import shutil
660 -    dbfile = config.sources()['system']['db-name']
661 -    cleanup_sqlite(dbfile)
662 -    template = '%s-template' % dbfile
663 -    if exists(template):
664 -        shutil.copy(template, dbfile)
665 -        return True
666 -    return False
667 +    def absolute_dbfile(self):
668 +        """absolute path of current database file"""
669 +        dbfile = join(self._ensure_test_backup_db_dir(),
670 +                      self.config.sources()['system']['db-name'])
671 +        self.config.sources()['system']['db-name'] = dbfile
672 +        return dbfile
673 
674 -def init_test_database_sqlite(config):
675 -    """initialize a fresh sqlite databse used for testing purpose"""
676 -    # remove database file if it exists
677 -    dbfile = join(config.apphome, config.sources()['system']['db-name'])
678 -    config.sources()['system']['db-name'] = dbfile
679 -    if not reset_test_database_sqlite(config):
680 +
681 +    def process_cache_entry(self, directory, dbname, db_id, entry):
682 +        return entry.get('sqlite')
683 +
684 +    def _backup_database(self, db_id=DEFAULT_EMPTY_DB_ID):
685 +        # XXX remove database file if it exists ???
686 +        dbfile = self.absolute_dbfile()
687 +        backup_file = self.absolute_backup_file(db_id, 'sqlite')
688 +        shutil.copy(dbfile, backup_file)
689 +        # Usefull to debug WHO write a database
690 +        # backup_stack = self.absolute_backup_file(db_id, '.stack')
691 +        #with open(backup_stack, 'w') as backup_stack_file:
692 +        #    import traceback
693 +        #    traceback.print_stack(file=backup_stack_file)
694 +        return backup_file
695 +
696 +    def _new_repo(self, config):
697 +        repo = super(SQLiteTestDataBaseHandler, self)._new_repo(config)
698 +        install_sqlite_patch(repo.querier)
699 +        return repo
700 +
701 +    def _restore_database(self, backup_coordinates, _config):
702 +        # remove database file if it exists ?
703 +        dbfile = self.absolute_dbfile()
704 +        self._cleanup_database(dbfile)
705 +        #print 'resto from', backup_coordinates
706 +        shutil.copy(backup_coordinates, dbfile)
707 +        repo = self.get_repo()
708 +
709 +    def init_test_database(self):
710 +        """initialize a fresh sqlite databse used for testing purpose"""
711          # initialize the database
712 -        import shutil
713          from cubicweb.server import init_repository
714 -        init_repository(config, interactive=False)
715 -        shutil.copy(dbfile, '%s-template' % dbfile)
716 +        self._cleanup_database(self.absolute_dbfile())
717 +        init_repository(self.config, interactive=False)
718 +
719 
720  def install_sqlite_patch(querier):
721      """This patch hotfixes the following sqlite bug :
722         - http://www.sqlite.org/cvstrac/tktview?tn=1327,33
723         (some dates are returned as strings rather thant date objects)
@@ -375,5 +693,81 @@
724                          break
725              return rset
726          return new_execute
727      querier.__class__.execute = wrap_execute(querier.__class__.execute)
728      querier.__class__._devtools_sqlite_patched = True
729 +
730 +
731 +
732 +HANDLERS = {}
733 +
734 +def register_handler(handlerkls, overwrite=False):
735 +    assert handlerkls is not None
736 +    if overwrite or handlerkls.DRIVER not in HANDLERS:
737 +        HANDLERS[handlerkls.DRIVER] = handlerkls
738 +    else:
739 +        msg = "%s: Handler already exists use overwrite if it's intended\n"\
740 +              "(existing handler class is %r)"
741 +        raise ValueError(msg % (handlerkls.DRIVER, HANDLERS[handlerkls.DRIVER]))
742 +
743 +register_handler(PostgresTestDataBaseHandler)
744 +register_handler(SQLiteTestDataBaseHandler)
745 +register_handler(SQLServerTestDataBaseHandler)
746 +
747 +
748 +class HCache(object):
749 +    """Handler cache object: store database handler for a given configuration.
750 +
751 +    We only keep one repo in cache to prevent too many objects from staying alive
752 +    (a database handler holds a reference to a repository). As a new handler is
753 +    currently created for each TestCase class and all test methods are executed
754 +    sequentially within this class, there should not be more cache misses than
755 +    with a wider cache, since once a handler stops being used it won't be used
756 +    again.
757 +    """
758 +
759 +    def __init__(self):
760 +        self.config = None
761 +        self.handler = None
762 +
763 +    def get(self, config):
764 +        if config is self.config:
765 +            return self.handler
766 +        else:
767 +            return None
768 +
769 +    def set(self, config, handler):
770 +        self.config = config
771 +        self.handler = handler
772 +
773 +HCACHE = HCache()
774 +
775 +
776 +# XXX a class method on Test ?
777 +def get_test_db_handler(config):
778 +    handler = HCACHE.get(config)
779 +    if handler is not None:
780 +        return handler
781 +    sources = config.sources()
782 +    driver = sources['system']['db-driver']
783 +    key = (driver, config)
784 +    handlerkls = HANDLERS.get(driver, None)
785 +    if handlerkls is not None:
786 +        handler = handlerkls(config)
787 +        HCACHE.set(config, handler)
788 +        return handler
789 +    else:
790 +        raise ValueError('no initialization function for driver %r' % driver)
791 +
792 +### compatibility layer ##############################################
793 +from logilab.common.deprecation import deprecated
794 +
795 +@deprecated("please use the new DatabaseHandler mecanism")
796 +def init_test_database(config=None, configdir='data', apphome=None):
797 +    """init a test database for a specific driver"""
798 +    if config is None:
799 +        config = TestServerConfiguration(apphome=apphome)
800 +    handler = get_test_db_handler(config)
801 +    handler.build_db_cache()
802 +    return handler.get_repo_and_cnx()
803 +
804 +
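
A hedged usage sketch of the handler API introduced above; ``get_test_db_handler``, ``build_db_cache``, ``get_repo_and_cnx`` and ``DEFAULT_EMPTY_DB_ID`` come from this patch, while the RQL query and error handling are only illustrative:

.. sourcecode:: python

    from cubicweb import devtools

    # configuration for a test instance living in the 'data' directory
    config = devtools.TestServerConfiguration('data')

    # pick the handler matching the configured db-driver (sqlite, postgres, ...)
    handler = devtools.get_test_db_handler(config)

    # build (or reuse) the cached empty database, then get a repo + admin cnx
    handler.build_db_cache(devtools.DEFAULT_EMPTY_DB_ID)
    repo, cnx = handler.get_repo_and_cnx(devtools.DEFAULT_EMPTY_DB_ID)
    try:
        rset = cnx.cursor().execute('Any COUNT(X) WHERE X is CWUser')
        print(rset[0][0])
    finally:
        cnx.close()
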
diff --git a/devtools/cwwindmill.py b/devtools/cwwindmill.py
@@ -87,15 +87,14 @@
805              test_dir = __file__
806 
807          Instead of toggle `edit_test` value, try `python <test script> -f`
808          """
809          browser = 'firefox'
810 -<<<<<<< /home/syt/src/fcubicweb/cubicweb/devtools/cwwindmill.py
811 
812          edit_test = "-i" in sys.argv # detection for pytest invocation
813          # Windmill use case are written with no anonymous user
814 -        anonymous_logged = False
815 +        anonymous_allowed = False
816 
817          tags = CubicWebServerTC.tags & Tags(('windmill',))
818 
819          def _test_dir(self):
820              """access to class attribute if possible or make assumption
diff --git a/devtools/httptest.py b/devtools/httptest.py
@@ -87,16 +87,15 @@
821 
822  class CubicWebServerTC(CubicWebTC):
823      """Class for running test web server. See :class:`CubicWebServerConfig`.
824 
825      Class attributes:
826 -    * `anonymous_logged`: flag telling if anonymous user should be logged-in
827 -      by default (True by default) XXX (syt) s/logged-in/allowed/ ?
828 +    * `anonymous_allowed`: flag telling if anonymous browsing should be allowed
829      """
830      configcls = CubicWebServerConfig
831      # anonymous is logged by default in cubicweb test cases
832 -    anonymous_logged = True
833 +    anonymous_allowed = True
834 
835      def start_server(self):
836          # use a semaphore to avoid starting test while the http server isn't
837          # fully initilialized
838          semaphore = threading.Semaphore(0)
@@ -187,8 +186,7 @@
839              print err
840          super(CubicWebServerTC, self).tearDown()
841 
842      @classmethod
843      def init_config(cls, config):
844 -        if not cls.anonymous_logged:
845 -            config.anonymous_credential = None
846 +        config.set_anonymous_allowed(cls.anonymous_allowed)
847          super(CubicWebServerTC, cls).init_config(config)
diff --git a/devtools/test/unittest_httptest.py b/devtools/test/unittest_httptest.py
@@ -40,11 +40,11 @@
848              self.fail('no mention of base url in retrieved page')
849 
850 
851  class TwistedCWIdentTC(CubicWebServerTC):
852 
853 -    anonymous_logged = False
854 +    anonymous_allowed = False
855      tags = CubicWebServerTC.tags | Tags(('auth',))
856 
857      def test_response_denied(self):
858          response = self.web_get()
859          self.assertEqual(response.status, httplib.FORBIDDEN)
diff --git a/devtools/testlib.py b/devtools/testlib.py
@@ -47,11 +47,11 @@
860  from cubicweb.sobjects import notification
861  from cubicweb.web import Redirect, application
862  from cubicweb.server.session import security_enabled
863  from cubicweb.server.hook import SendMailOp
864  from cubicweb.devtools import SYSTEM_ENTITIES, SYSTEM_RELATIONS, VIEW_VALIDATORS
865 -from cubicweb.devtools import BASE_URL, fake, htmlparser
866 +from cubicweb.devtools import BASE_URL, fake, htmlparser, DEFAULT_EMPTY_DB_ID
867  from cubicweb.utils import json
868 
869  # low-level utilities ##########################################################
870 
871  class CubicWebDebugger(Debugger):
@@ -59,11 +59,12 @@
872      html into a temporary file and open a web browser to examinate it.
873      """
874      def do_view(self, arg):
875          import webbrowser
876          data = self._getval(arg)
877 -        file('/tmp/toto.html', 'w').write(data)
878 +        with file('/tmp/toto.html', 'w') as toto:
879 +            toto.write(data)
880          webbrowser.open('file:///tmp/toto.html')
881 
882  def line_context_filter(line_no, center, before=3, after=None):
883      """return true if line are in context
884 
@@ -81,26 +82,10 @@
885          protected_entities = yams.schema.BASE_TYPES
886      else:
887          protected_entities = yams.schema.BASE_TYPES.union(SYSTEM_ENTITIES)
888      return set(schema.entities()) - protected_entities
889 
890 -def refresh_repo(repo, resetschema=False, resetvreg=False):
891 -    for pool in repo.pools:
892 -        pool.close(True)
893 -    repo.system_source.shutdown()
894 -    devtools.reset_test_database(repo.config)
895 -    for pool in repo.pools:
896 -        pool.reconnect()
897 -    repo._type_source_cache = {}
898 -    repo._extid_cache = {}
899 -    repo.querier._rql_cache = {}
900 -    for source in repo.sources:
901 -        source.reset_caches()
902 -    if resetschema:
903 -        repo.set_schema(repo.config.load_schema(), resetvreg=resetvreg)
904 -
905 -
906  # email handling, to test emails sent by an application ########################
907 
908  MAILBOX = []
909 
910  class Email:
@@ -189,18 +174,32 @@
911      """
912      appid = 'data'
913      configcls = devtools.ApptestConfiguration
914      reset_schema = reset_vreg = False # reset schema / vreg between tests
915      tags = TestCase.tags | Tags('cubicweb', 'cw_repo')
916 +    test_db_id = DEFAULT_EMPTY_DB_ID
917 +    _cnxs = set() # establised connection
918 +    _cnx  = None  # current connection
919 +
920 +    # Too much complicated stuff. the class doesn't need to bear the repo anymore
921 +    @classmethod
922 +    def set_cnx(cls, cnx):
923 +        cls._cnxs.add(cnx)
924 +        cls._cnx = cnx
925 +
926 +    @property
927 +    def cnx(self):
928 +        return self.__class__._cnx
929 
930      @classproperty
931      def config(cls):
932          """return the configuration object
933 
934          Configuration is cached on the test class.
935          """
936          try:
937 +            assert not cls is CubicWebTC, "Don't use CubicWebTC directly to prevent database caching issue"
938              return cls.__dict__['_config']
939          except KeyError:
940              home = abspath(join(dirname(sys.modules[cls.__module__].__file__), cls.appid))
941              config = cls._config = cls.configcls(cls.appid, apphome=home)
942              config.mode = 'test'
@@ -235,39 +234,36 @@
943          try:
944              config.global_set_option('embed-allowed', re.compile('.*'))
945          except: # not in server only configuration
946              pass
947 
948 +    #XXX this doesn't need to a be classmethod anymore
949      @classmethod
950      def _init_repo(cls):
951          """init the repository and connection to it.
952 -
953 -        Repository and connection are cached on the test class. Once
954 -        initialized, we simply reset connections and repository caches.
955          """
956 -        if not 'repo' in cls.__dict__:
957 -            cls._build_repo()
958 -        else:
959 -            try:
960 -                cls.cnx.rollback()
961 -            except ProgrammingError:
962 -                pass
963 -            cls._refresh_repo()
964 +        # setup configuration for test
965 +        cls.init_config(cls.config)
966 +        # get or restore and working db.
967 +        db_handler = devtools.get_test_db_handler(cls.config)
968 +        db_handler.build_db_cache(cls.test_db_id, cls.pre_setup_database)
969 
970 -    @classmethod
971 -    def _build_repo(cls):
972 -        cls.repo, cls.cnx = devtools.init_test_database(config=cls.config)
973 -        cls.init_config(cls.config)
974 -        cls.repo.hm.call_hooks('server_startup', repo=cls.repo)
975 +        cls.repo, cnx = db_handler.get_repo_and_cnx(cls.test_db_id)
976 +        # no direct assignation to cls.cnx anymore.
977 +        # cnx is now an instance property that use a class protected attributes.
978 +        cls.set_cnx(cnx)
979          cls.vreg = cls.repo.vreg
980 -        cls.websession = DBAPISession(cls.cnx, cls.admlogin)
981 -        cls._orig_cnx = (cls.cnx, cls.websession)
982 +        cls.websession = DBAPISession(cnx, cls.admlogin)
983 +        cls._orig_cnx = (cnx, cls.websession)
984          cls.config.repository = lambda x=None: cls.repo
985 
986 -    @classmethod
987 -    def _refresh_repo(cls):
988 -        refresh_repo(cls.repo, cls.reset_schema, cls.reset_vreg)
989 +    def _close_cnx(self):
990 +        for cnx in list(self._cnxs):
991 +            if not cnx._closed:
992 +                cnx.rollback()
993 +                cnx.close()
994 +            self._cnxs.remove(cnx)
995 
996      # global resources accessors ###############################################
997 
998      @property
999      def schema(self):
@@ -305,38 +301,51 @@
1000 
1001      # default test setup and teardown #########################################
1002 
1003      def setUp(self):
1004          # monkey patch send mail operation so emails are sent synchronously
1005 -        self._old_mail_postcommit_event = SendMailOp.postcommit_event
1006 -        SendMailOp.postcommit_event = SendMailOp.sendmails
1007 +        self._patch_SendMailOp()
1008          pause_tracing()
1009          previous_failure = self.__class__.__dict__.get('_repo_init_failed')
1010          if previous_failure is not None:
1011              self.skipTest('repository is not initialised: %r' % previous_failure)
1012          try:
1013              self._init_repo()
1014 +            self.addCleanup(self._close_cnx)
1015          except Exception, ex:
1016              self.__class__._repo_init_failed = ex
1017              raise
1018          resume_tracing()
1019 -        self._cnxs = []
1020          self.setup_database()
1021          self.commit()
1022          MAILBOX[:] = [] # reset mailbox
1023 
1024      def tearDown(self):
1025 -        if not self.cnx._closed:
1026 -            self.cnx.rollback()
1027 -        for cnx in self._cnxs:
1028 -            if not cnx._closed:
1029 -                cnx.close()
1030 -        SendMailOp.postcommit_event = self._old_mail_postcommit_event
1031 +        # XXX hack until logilab.common.testlib is fixed
1032 +        while self._cleanups:
1033 +            cleanup, args, kwargs = self._cleanups.pop(-1)
1034 +            cleanup(*args, **kwargs)
1035 +
1036 +    def _patch_SendMailOp(self):
1037 +        # monkey patch send mail operation so emails are sent synchronously
1038 +        _old_mail_postcommit_event = SendMailOp.postcommit_event
1039 +        SendMailOp.postcommit_event = SendMailOp.sendmails
1040 +        def reverse_SendMailOp_monkey_patch():
1041 +            SendMailOp.postcommit_event = _old_mail_postcommit_event
1042 +        self.addCleanup(reverse_SendMailOp_monkey_patch)
1043 
1044      def setup_database(self):
1045          """add your database setup code by overriding this method"""
1046 
1047 +    @classmethod
1048 +    def pre_setup_database(cls, session, config):
1049 +        """add your pre database setup code by overriding this method
1050 +
1051 +        Do not forget to set the cls.test_db_id value to enable caching of the
1052 +        result.
1053 +        """
1054 +
1055      # user / session management ###############################################
1056 
1057      def user(self, req=None):
1058          """return the application schema"""
1059          if req is None:
@@ -369,28 +378,24 @@
1060              # definitly don't want autoclose when used as a context manager
1061              return self.cnx
1062          autoclose = kwargs.pop('autoclose', True)
1063          if not kwargs:
1064              kwargs['password'] = str(login)
1065 -        self.cnx = repo_connect(self.repo, unicode(login), **kwargs)
1066 +        self.set_cnx(repo_connect(self.repo, unicode(login), **kwargs))
1067          self.websession = DBAPISession(self.cnx)
1068 -        self._cnxs.append(self.cnx)
1069          if login == self.vreg.config.anonymous_user()[0]:
1070              self.cnx.anonymous_connection = True
1071          if autoclose:
1072              return TestCaseConnectionProxy(self, self.cnx)
1073          return self.cnx
1074 
1075      def restore_connection(self):
1076          if not self.cnx is self._orig_cnx[0]:
1077              if not self.cnx._closed:
1078                  self.cnx.close()
1079 -            try:
1080 -                self._cnxs.remove(self.cnx)
1081 -            except ValueError:
1082 -                pass
1083 -        self.cnx, self.websession = self._orig_cnx
1084 +        cnx, self.websession = self._orig_cnx
1085 +        self.set_cnx(cnx)
1086 
1087      # db api ##################################################################
1088 
1089      @nocoverage
1090      def cursor(self, req=None):
@@ -658,10 +663,14 @@
1091          return self.expect_redirect(lambda x: self.app_publish(x, path), req)
1092 
1093      def init_authentication(self, authmode, anonuser=None):
1094          self.set_option('auth-mode', authmode)
1095          self.set_option('anonymous-user', anonuser)
1096 +        if anonuser is None:
1097 +            self.config.anonymous_credential = None
1098 +        else:
1099 +            self.config.anonymous_credential = (anonuser, anonuser)
1100          req = self.request()
1101          origsession = req.session
1102          req.session = req.cnx = None
1103          del req.execute # get back to class implementation
1104          sh = self.app.session_handler
@@ -948,10 +957,12 @@
1105 
1106  class AutoPopulateTest(CubicWebTC):
1107      """base class for test with auto-populating of the database"""
1108      __abstract__ = True
1109 
1110 +    test_db_id = 'autopopulate'
1111 +
1112      tags = CubicWebTC.tags | Tags('autopopulated')
1113 
1114      pdbclass = CubicWebDebugger
1115      # this is a hook to be able to define a list of rql queries
1116      # that are application dependent and cannot be guessed automatically
@@ -1081,11 +1092,13 @@
1117      """import this if you wan automatic tests to be ran"""
1118 
1119      tags = AutoPopulateTest.tags | Tags('web', 'generated')
1120 
1121      def setUp(self):
1122 -        AutoPopulateTest.setUp(self)
1123 +        assert not self.__class__ is AutomaticWebTest, 'Please subclass AutomaticWebTest to prevent database caching issue'
1124 +        super(AutomaticWebTest, self).setUp()
1125 +
1126          # access to self.app for proper initialization of the authentication
1127          # machinery (else some views may fail)
1128          self.app
1129 
1130      ## one each
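
The ``_patch_SendMailOp`` helper above relies on a reusable pattern: patch in ``setUp`` and register the undo with ``addCleanup`` so it runs even if the test fails. A generic sketch under that assumption (``EmailConfig`` is a hypothetical stand-in, not a CubicWeb class):

.. sourcecode:: python

    import unittest

    class EmailConfig(object):
        send_async = True          # hypothetical attribute to patch

    class PatchingTC(unittest.TestCase):
        def setUp(self):
            old = EmailConfig.send_async
            EmailConfig.send_async = False     # e.g. deliver mails synchronously
            def restore():
                EmailConfig.send_async = old
            self.addCleanup(restore)           # undone automatically at teardown

        def test_patched(self):
            self.assertFalse(EmailConfig.send_async)

    if __name__ == '__main__':
        unittest.main()
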
diff --git a/doc/book/en/admin/setup.rst b/doc/book/en/admin/setup.rst
@@ -34,21 +34,21 @@
1131  ```````````````````````````
1132 
1133  Depending on the distribution you are using, add the appropriate line to your
1134  list of sources (for example by editing ``/etc/apt/sources.list``).
1135 
1136 -For Debian Lenny::
1137 -
1138 -  deb http://ftp.logilab.org/dists/ lenny/
1139 +For Debian Squeeze (stable)::
1140 
1141 -For Debian Sid::
1142 +  deb http://download.logilab.org/production/ squeeze/
1143 
1144 -  deb http://ftp.logilab.org/dists/ sid/
1145 +For Debian Sid (unstable)::
1146 
1147 -For Ubuntu Hardy::
1148 +  deb http://download.logilab.org/production/ sid/
1149 
1150 -  deb http://ftp.logilab.org/dists/ hardy/
1151 +For Ubuntu Lucid (Long Term Support)::
1152 +
1153 +  deb http://download.logilab.org/production/ lucid/
1154 
1155 
1156  You can now install the required packages with the following command::
1157 
1158    apt-get update
@@ -73,15 +73,15 @@
1159 
1160  The repositories are signed with `Logilab's gnupg key`_. To avoid warning on
1161  "apt-get update":
1162 
1163  1. become root using sudo
1164 -2. download http://ftp.logilab.org/dists/logilab-dists-key.asc using e.g. wget
1165 +2. download http://download.logilab.org/logilab-dists-key.asc using e.g. wget
1166  3. run "apt-key add logilab-dists-key.asc"
1167  4. re-run apt-get update (manually or through the package manager, whichever you prefer)
1168 
1169 -.. _`Logilab's gnupg key`: http://ftp.logilab.org/dists/logilab-dists-key.asc
1170 +.. _`Logilab's gnupg key`: http://download.logilab.org/logilab-dists-key.asc
1171  .. _`CubicWeb.org Forge`: http://www.cubicweb.org/project/
1172  .. _`cubicweb with other database`: DatabaseInstallation_
1173  .. _`cubicweb with postgresql datatabase` : PostgresqlConfiguration_
1174  .. _`cubicweb with mysql database` : MySqlConfiguration_
1175 
@@ -167,15 +167,15 @@
1176  Install from source
1177  ```````````````````
1178 
1179  .. _TarballInstallation:
1180 
1181 -You can download the archive containing the sources from our `ftp site`_ at::
1182 +You can download the archive containing the sources from our `download site`_ at::
1183 
1184 -  http://ftp.logilab.org/pub/cubicweb/
1185 +  http://download.logilab.org/pub/cubicweb/
1186 
1187 -.. _`ftp site`: http://ftp.logilab.org/pub/cubicweb/
1188 +.. _`download site`: http://download.logilab.org/pub/cubicweb/
1189 
1190  Make sure you also have all the :ref:`InstallDependencies`.
1191 
1192  .. _MercurialInstallation:
1193 
@@ -266,16 +266,16 @@
1194 
1195  Please be careful to select the right python (2.5) and postgres (8.4) versions.
1196 
1197  A windows compiled recent version of gettext::
1198 
1199 -  http://ftp.logilab.org/pub/gettext/gettext-0.17-win32-setup.exe
1200 +  http://download.logilab.org/pub/gettext/gettext-0.17-win32-setup.exe
1201 
1202  A pre-compiled version of rql for windows (take care of retrieving the
1203  most recent version available there)::
1204 
1205 -  http://ftp.logilab.org/pub/rql/rql-0.23.0.win32-py2.5.exe
1206 +  http://download.logilab.org/pub/rql/rql-0.23.0.win32-py2.5.exe
1207 
1208  Pyro enables remote access to cubicweb repository instances. Get it there::
1209 
1210    http://sourceforge.net/projects/pyro/files/
1211 
diff --git a/doc/book/en/annexes/rql/debugging.rst b/doc/book/en/annexes/rql/debugging.rst
@@ -31,11 +31,11 @@
1212 
1213 
1214  Enable verbose output
1215  ~~~~~~~~~~~~~~~~~~~~~
1216 
1217 -It may be interested to enable a verboser output to debug your RQL statements:
1218 +To debug your RQL statements, it can be useful to enable a verbose output:
1219 
1220  .. sourcecode:: python
1221 
1222      from cubicweb import server
1223      server.set_debug(server.DBG_RQL|server.DBG_SQL|server.DBG_ALL)
diff --git a/doc/book/en/annexes/rql/language.rst b/doc/book/en/annexes/rql/language.rst
@@ -151,11 +151,11 @@
1224    ascendant (`ASC`).
1225 
1226  - Aggregate Functions: COUNT, MIN, MAX, AVG, SUM, GROUP_CONCAT
1227 
1228  Having
1229 -``````
1230 +```````
1231 
1232  The HAVING clause, as in SQL, has been originally introduced to restrict a query
1233  according to value returned by an aggregate function, e.g.::
1234 
1235      Any X GROUPBY X WHERE X relation Y HAVING COUNT(Y) > 10
@@ -212,11 +212,16 @@
1236 
1237      Any P ORDERBY N LIMIT 5 OFFSET 10 WHERE P is Person, P firstname N
1238 
1239 
1240  Exists
1241 -``````
1242 +```````
1243 +
1244 +You can use `EXISTS` when you want to know if some expression is true and do not
1245 +need the complete set of elements that make it true. Testing for existence is
1246 +much faster than fetching the complete set of results.
1247 +
1248  ::
1249 
1250      Any X ORDERBY PN,N
1251      WHERE X num N, X version_of P, P name PN,
1252            EXISTS(X in_state S, S name IN ("dev", "ready"))
diff --git a/doc/book/en/devrepo/testing.rst b/doc/book/en/devrepo/testing.rst
@@ -290,10 +290,31 @@
1253  .. warning::
1254 
1255    Take care to not let the imported `AutomaticWebTest` in your test module
1256    namespace, else both your subclass *and* this parent class will be run.
1257 
1258 +Cache heavy database setup
1259 +-------------------------------
1260 +
1261 +Some test suites require a complex database setup that takes seconds (or even
1262 +minutes) to complete. Repeating the whole setup for every individual test makes
1263 +the whole run very slow. The ``CubicWebTC`` class offers a simple way to prepare
1264 +a specific database once for multiple tests. Set the `test_db_id` class
1265 +attribute of your ``CubicWebTC`` to a unique identifier and build the cached
1266 +content in the :meth:`pre_setup_database` class method. As the
1267 +:meth:`pre_setup_database` method is not guaranteed to be called, you must not
1268 +set there any class attribute to be used during tests.  Databases for each
1269 +`test_db_id` are automatically created if not already in cache.  Clearing the
1270 +cache is up to the user. Cache files are found in the :file:`data/database`
1271 +subdirectory of your test directory.
1272 +
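+For instance, a test case using this facility might look like the following
+sketch (``BlogEntry`` and its attributes are hypothetical, used here for
+illustration only):
+
+.. sourcecode:: python
+
+    from cubicweb.devtools.testlib import CubicWebTC
+
+    class BlogEntriesTC(CubicWebTC):
+        test_db_id = 'blog-with-entries'
+
+        @classmethod
+        def pre_setup_database(cls, session, config):
+            # expensive setup, run once then cached on disk
+            # ('BlogEntry' is a hypothetical entity type)
+            for i in xrange(100):
+                session.create_entity('BlogEntry', title=u'entry %s' % i,
+                                      content=u'some content')
+            session.commit()
+
+        def test_entries_are_there(self):
+            self.assertEqual(len(self.execute('Any X WHERE X is BlogEntry')), 100)
+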
1273 +.. warning::
1274 +
1275 +  Take care to always use the same :meth:`pre_setup_database` function for a
1276 +  given `test_db_id`, otherwise your tests will have unpredictable results,
1277 +  depending on which one was encountered first.
1278 +
1279  Testing on a real-life database
1280  -------------------------------
1281 
1282  The ``CubicWebTC`` class uses the `cubicweb.devtools.ApptestConfiguration`
1283  configuration class to setup its testing environment (database driver,
diff --git a/doc/book/en/tutorials/advanced/part03_bfss.rst b/doc/book/en/tutorials/advanced/part03_bfss.rst
@@ -21,24 +21,24 @@
1284 
1285      from cubicweb.server import hook
1286      from cubicweb.server.sources import storage
1287 
1288      class ServerStartupHook(hook.Hook):
1289 -	__regid__ = 'sytweb.serverstartup'
1290 -	events = ('server_startup', 'server_maintenance')
1291 +        __regid__ = 'sytweb.serverstartup'
1292 +        events = ('server_startup', 'server_maintenance')
1293 
1294 -	def __call__(self):
1295 -	    bfssdir = join(self.repo.config.appdatahome, 'bfss')
1296 -	    if not exists(bfssdir):
1297 -		makedirs(bfssdir)
1298 -		print 'created', bfssdir
1299 -	    storage = storages.BytesFileSystemStorage(bfssdir)
1300 -	    set_attribute_storage(self.repo, 'File', 'data', storage)
1301 +        def __call__(self):
1302 +            bfssdir = join(self.repo.config.appdatahome, 'bfss')
1303 +            if not exists(bfssdir):
1304 +                makedirs(bfssdir)
1305 +                print 'created', bfssdir
1306 +            storage = storages.BytesFileSystemStorage(bfssdir)
1307 +            set_attribute_storage(self.repo, 'File', 'data', storage)
1308 
1309  .. Note::
1310 
1311 -  * how we built the hook's registry identifier (_`_regid__`): you can introduce
1312 +  * how we built the hook's registry identifier (`__regid__`): you can introduce
1313      'namespaces' by using there python module like naming identifiers. This is
1314      especially import for hooks where you usually want a new custom hook, not
1315      overriding / specializing an existant one, but the concept may be applied to
1316      any application objects
1317 
@@ -48,52 +48,52 @@
1318      the storage set, else we'll be in trouble...
1319 
1320    * the path given to the storage is the place where file added through the ui
1321      (or in the database before migration) will be located
1322 
1323 -  * be ware that by doing this, you can't anymore write queries that will try to
1324 +  * beware that by doing this, you can no longer write queries that try to
1325      restrict on File `data` attribute. Hopefuly we don't do that usually
1326      on file's content or more generally on attributes for the Bytes type
1327 
1328  Now, if you've already added some photos through the web ui, you'll have to
1329  migrate existing data so file's content will be stored on the file-system instead
1330  of the database. There is a migration command to do so, let's run it in the
1331 -cubicweb shell (in actual life, you'd have to put it in a migration script as we
1332 -seen last time):
1333 +cubicweb shell (in real life, you would have to put it in a migration script as we
1334 +have seen last time):
1335 
1336  ::
1337 
1338     $ cubicweb-ctl shell sytweb
1339 -    entering the migration python shell
1340 -    just type migration commands or arbitrary python code and type ENTER to execute it
1341 -    type "exit" or Ctrl-D to quit the shell and resume operation
1342 -    >>> storage_changed('File', 'data')
1343 -    [........................]
1344 +   entering the migration python shell
1345 +   just type migration commands or arbitrary python code and type ENTER to execute it
1346 +   type "exit" or Ctrl-D to quit the shell and resume operation
1347 +   >>> storage_changed('File', 'data')
1348 +   [........................]
1349 
1350 
1351 -That's it. Now, file added through the web ui will have their content stored on
1352 +That's it. Now, files added through the web ui will have their content stored on
1353  the file-system, and you'll also be able to import files from the file-system as
1354  explained in the next part.
1355 
1356  Step 2: importing some data into the instance
1357  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1358 
1359 -Hey, we start to have some nice features, let give us a try on this new web
1360 +Hey, we start to have some nice features, so let's try out this new web
1361  site. For instance if I have a 'photos/201005WePyrenees' containing pictures for
1362  a particular event, I can import it to my web site by typing ::
1363 
1364    $ cubicweb-ctl fsimport -F sytweb photos/201005WePyrenees/
1365    ** importing directory /home/syt/photos/201005WePyrenees
1366 -    importing IMG_8314.JPG
1367 -    importing IMG_8274.JPG
1368 -    importing IMG_8286.JPG
1369 -    importing IMG_8308.JPG
1370 -    importing IMG_8304.JPG
1371 +  importing IMG_8314.JPG
1372 +  importing IMG_8274.JPG
1373 +  importing IMG_8286.JPG
1374 +  importing IMG_8308.JPG
1375 +  importing IMG_8304.JPG
1376 
1377  .. Note::
1378 -  The -F option tell that folders should be mapped, hence my photos will be
1379 -  all under a Folder entity corresponding to the file-system folder.
1380 +  The -F option means that folders should be mapped, hence my photos will be
1381 +  linked to a Folder entity corresponding to the file-system folder.
1382 
1383  Let's take a look at the web ui:
1384 
1385  .. image:: ../../images/tutos-photowebsite_ui1.png
1386 
@@ -101,15 +101,15 @@
1387  By default, files are only accessible to authenticated users, and I'm looking at
1388  the site as anonymous, e.g. not authenticated. If I login, I can now see:
1389 
1390  .. image:: ../../images/tutos-photowebsite_ui2.png
1391 
1392 -Yeah, it's there! You can also notice that I can see some entities as well as
1393 +Yeah, it's there! You will notice that I can see some entities as well as
1394  folders and images the anonymous user can't. It just works **everywhere in the
1395  ui** since it's handled at the repository level, thanks to our security model.
1396 
1397 -Now if I click on the newly inserted folder, I can see
1398 +Now if I click on the recently inserted folder, I can see
1399 
1400  .. image:: ../../images/tutos-photowebsite_ui3.png
1401 
1402  Great! There is even my pictures in the folder. I can know give to this folder a
1403  nicer name (provided I don't intend to import from it anymore, else already
@@ -122,10 +122,10 @@
1404  ~~~~~~~~~~
1405 
1406  We started to see here an advanced feature of our repository: the ability
1407  to store some parts of our data-model into a custom storage, outside the
1408  database. There is currently only the :class:`BytesFileSystemStorage` available,
1409 -but you can expect to see more coming in a near future (our write your own!).
1410 +but you can expect to see more coming in a near future (or write your own!).
1411 
1412  Also, we can know start to feed our web-site with some nice pictures!
1413  The site isn't perfect (far from it actually) but it's usable, and we can
1414  now start using it and improve it on the way. The Incremental Cubic Way :)
diff --git a/entities/test/unittest_wfobjs.py b/entities/test/unittest_wfobjs.py
@@ -125,10 +125,11 @@
1415          # test a std user get no possible transition
1416          cnx = self.login('member')
1417          # fetch the entity using the new session
1418          trs = list(cnx.user().cw_adapt_to('IWorkflowable').possible_transitions())
1419          self.assertEqual(len(trs), 0)
1420 +        cnx.close()
1421 
1422      def _test_manager_deactivate(self, user):
1423          iworkflowable = user.cw_adapt_to('IWorkflowable')
1424          user.cw_clear_relation_cache('in_state', 'subject')
1425          self.assertEqual(len(user.in_state), 1)
@@ -209,10 +210,11 @@
1426          iworkflowable.fire_transition('deactivate')
1427          cnx.commit()
1428          with self.assertRaises(ValidationError) as cm:
1429              iworkflowable.fire_transition('activate')
1430          self.assertEqual(cm.exception.errors, {'by_transition-subject': "transition may not be fired"})
1431 +        cnx.close()
1432 
1433      def test_fire_transition_owned_by(self):
1434          self.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", '
1435                       'X expression "X owned_by U", T condition X '
1436                       'WHERE T name "deactivate"')
diff --git a/hooks/test/unittest_syncschema.py b/hooks/test/unittest_syncschema.py
@@ -28,22 +28,14 @@
1437      del SchemaModificationHooksTC.schema_eids
1438 
1439  class SchemaModificationHooksTC(CubicWebTC):
1440      reset_schema = True
1441 
1442 -    @classmethod
1443 -    def init_config(cls, config):
1444 -        super(SchemaModificationHooksTC, cls).init_config(config)
1445 -        # we have to read schema from the database to get eid for schema entities
1446 -        config._cubes = None
1447 -        cls.repo.fill_schema()
1448 -        cls.schema_eids = schema_eids_idx(cls.repo.schema)
1449 -
1450 -    @classmethod
1451 -    def _refresh_repo(cls):
1452 -        super(SchemaModificationHooksTC, cls)._refresh_repo()
1453 -        restore_schema_eids_idx(cls.repo.schema, cls.schema_eids)
1454 +    def setUp(self):
1455 +        super(SchemaModificationHooksTC, self).setUp()
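+        # read the schema from the database to get eids for schema entities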
1456 +        self.repo.fill_schema()
1457 +        self.__class__.schema_eids = schema_eids_idx(self.repo.schema)
1458 
1459      def index_exists(self, etype, attr, unique=False):
1460          self.session.set_pool()
1461          dbhelper = self.session.pool.source('system').dbhelper
1462          sqlcursor = self.session.pool['system']
diff --git a/migration.py b/migration.py
@@ -432,10 +432,13 @@
1463          show_diffs(configfile, newconfig, askconfirm=self.confirm is not yes)
1464          os.close(fd)
1465          if exists(newconfig):
1466              os.unlink(newconfig)
1467 
1468 +    # these are overridden by set_log_methods below
1469 +    # only defining here to prevent pylint from complaining
1470 +    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
1471 
1472  from logging import getLogger
1473  from cubicweb import set_log_methods
1474  set_log_methods(MigrationHelper, getLogger('cubicweb.migration'))
1475 
diff --git a/misc/migration/3.10.9_Any.py b/misc/migration/3.10.9_Any.py
@@ -1,29 +1,31 @@
1476  from __future__ import with_statement
1477  import sys
1478 
1479 -# fix some corrupted entities noticed on several instances
1480 -rql('DELETE CWConstraint X WHERE NOT E constrained_by X')
1481 -rql('SET X is_instance_of Y WHERE X is Y, NOT X is_instance_of Y')
1482 -commit()
1483 +
1484 +if confirm('fix some corrupted entities noticed on several instances?'):
1485 +    rql('DELETE CWConstraint X WHERE NOT E constrained_by X')
1486 +    rql('SET X is_instance_of Y WHERE X is Y, NOT X is_instance_of Y')
1487 +    commit()
1488 
1489  if confirm('fix existing cwuri?'):
1490 -    from logilab.common.shellutils import ProgressBar
1491 +    from logilab.common.shellutils import progress
1492      from cubicweb.server.session import hooks_control
1493      rset = rql('Any X, XC WHERE X cwuri XC, X cwuri ~= "%/eid/%"')
1494 -    if sys.stdout.isatty():
1495 -        pb = ProgressBar(nbops=rset.rowcount, size=70)
1496 -    else:
1497 -        pb = None
1498 -    with hooks_control(session, session.HOOKS_DENY_ALL, 'integrity'):
1499 -        for i,  e in enumerate(rset.entities()):
1500 -            e.set_attributes(cwuri=e.cwuri.replace('/eid', ''))
1501 -            if i % 100: # commit every 100 entities to limit memory consumption
1502 -                commit(ask_confirm=False)
1503 -            if pb is not None:
1504 +    title = "%i entities to fix" % len(rset)
1505 +    nbops = rset.rowcount
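+    # only display a progress bar when the migration is run interactively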
1506 +    enabled = interactive_mode
1507 +    with progress(title=title, nbops=nbops, size=30, enabled=enabled) as pb:
1508 +        with hooks_control(session, session.HOOKS_DENY_ALL, 'integrity'):
1509 +            for i,  row in enumerate(rset):
1510 +                data = {'eid': row[0], 'cwuri': row[1].replace(u'/eid', u'')}
1511 +                rql('SET X cwuri %(cwuri)s WHERE X eid %(eid)s', data)
1512 +                if not i % 100: # commit every 100 entities to limit memory consumption
1513 +                    pb.text = "%i committed" % i
1514 +                    commit(ask_confirm=False)
1515                  pb.update()
1516 -    commit(ask_confirm=False)
1517 +        commit(ask_confirm=False)
1518 
1519  try:
1520      from cubicweb import devtools
1521      option_group_changed('anonymous-user', 'main', 'web')
1522      option_group_changed('anonymous-password', 'main', 'web')
diff --git a/rtags.py b/rtags.py
@@ -158,10 +158,13 @@
1523      def etype_get(self, etype, rtype, role, ttype='*'):
1524          if role == 'subject':
1525              return self.get(etype, rtype, ttype, role)
1526          return self.get(ttype, rtype, etype, role)
1527 
1528 +    # these are overridden by set_log_methods below
1529 +    # only defining here to prevent pylint from complaining
1530 +    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
1531 
1532 
1533  class RelationTagsSet(RelationTags):
1534      """This class associates a set of tags to each key.
1535      """
diff --git a/schema.py b/schema.py
@@ -795,10 +795,11 @@
1536      def match_condition(self, session, eidfrom, eidto):
1537          return len(self.exec_query(session, eidfrom, eidto)) <= 1
1538 
1539 
1540  class RQLExpression(object):
1541 +
1542      def __init__(self, expression, mainvars, eid):
1543          self.eid = eid # eid of the entity representing this rql expression
1544          if not isinstance(mainvars, unicode):
1545              mainvars = unicode(mainvars)
1546          self.mainvars = mainvars
@@ -941,10 +942,14 @@
1547 
1548      @property
1549      def minimal_rql(self):
1550          return 'Any %s WHERE %s' % (self.mainvars, self.expression)
1551 
1552 +    # these are overridden by set_log_methods below
1553 +    # only defining here to prevent pylint from complaining
1554 +    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
1555 +
1556 
1557  class ERQLExpression(RQLExpression):
1558      def __init__(self, expression, mainvars=None, eid=None):
1559          RQLExpression.__init__(self, expression, mainvars or 'X', eid)
1560 
@@ -1101,10 +1106,13 @@
1561 
1562      def unhandled_file(self, filepath):
1563          """called when a file without handler associated has been found"""
1564          self.warning('ignoring file %r', filepath)
1565 
1566 +    # these are overridden by set_log_methods below
1567 +    # only defining here to prevent pylint from complaining
1568 +    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
1569 
1570  class CubicWebSchemaLoader(BootstrapSchemaLoader):
1571      """cubicweb specific schema loader, automatically adding metadata to the
1572      instance's schema
1573      """
@@ -1138,10 +1146,13 @@
1574          for cube in cubes:
1575              for filepath in self.get_schema_files(cube):
1576                  self.info('loading %s', filepath)
1577                  self.handle_file(filepath)
1578 
1579 +    # these are overridden by set_log_methods below
1580 +    # only defining here to prevent pylint from complaining
1581 +    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
1582 
1583  set_log_methods(CubicWebSchemaLoader, getLogger('cubicweb.schemaloader'))
1584  set_log_methods(BootstrapSchemaLoader, getLogger('cubicweb.bootstrapschemaloader'))
1585  set_log_methods(RQLExpression, getLogger('cubicweb.schema'))
1586 
diff --git a/server/__init__.py b/server/__init__.py
@@ -60,13 +60,13 @@
1587              DEBUG |= globals()[mode]
1588      else:
1589          DEBUG |= debugmode
1590 
1591  class debugged(object):
1592 -    """repository debugging context manager / decorator
1593 +    """Context manager and decorator to help debug the repository.
1594 
1595 -    Can be used either as a context manager:
1596 +    It can be used either as a context manager:
1597 
1598      >>> with debugged(server.DBG_RQL | server.DBG_REPO):
1599      ...     # some code in which you want to debug repository activity,
1600      ...     # seing information about RQL being executed an repository events.
1601 
@@ -75,12 +75,12 @@
1602      >>> @debugged(server.DBG_RQL | server.DBG_REPO)
1603      ... def some_function():
1604      ...     # some code in which you want to debug repository activity,
1605      ...     # seing information about RQL being executed an repository events
1606 
1607 -    debug mode will be reseted at its original value when leaving the "with"
1608 -    block or the decorated function
1609 +    The debug mode will be reset to its original value when leaving the "with"
1610 +    block or the decorated function.
1611      """
1612      def __init__(self, debugmode):
1613          self.debugmode = debugmode
1614          self._clevel = None
1615 
diff --git a/server/hook.py b/server/hook.py
@@ -452,10 +452,13 @@
1616      events = None
1617      category = None
1618      order = 0
1619      # XXX deprecated
1620      enabled = True
1621 +    # stop pylint from complaining about missing attributes in Hooks classes
1622 +    eidfrom = eidto = entity = rtype = None
1623 +
1624 
1625      @classmethod
1626      def check_events(cls):
1627          try:
1628              for event in cls.events:
@@ -755,10 +758,14 @@
1629      @property
1630      @deprecated('[3.6] use self.session.vreg.config')
1631      def config(self):
1632          return self.session.repo.config
1633 
1634 +    # these are overridden by set_log_methods below
1635 +    # only defining here to prevent pylint from complaining
1636 +    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
1637 +
1638  set_log_methods(Operation, getLogger('cubicweb.session'))
1639 
1640  def _container_add(container, value):
1641      {set: set.add, list: list.append}[container.__class__](container, value)
1642 
diff --git a/server/mssteps.py b/server/mssteps.py
@@ -160,10 +160,11 @@
1643              sql = 'INSERT INTO %s %s' % (self.outputtable, sql)
1644          return self.plan.sqlexec(sql, self.plan.args)
1645 
1646      def get_sql(self):
1647          self.inputmap = inputmap = self.children[-1].outputmap
1648 +        dbhelper = self.plan.syssource.dbhelper
1649          # get the select clause
1650          clause = []
1651          for i, term in enumerate(self.selection):
1652              try:
1653                  var_name = inputmap[term.as_string()]
@@ -216,16 +217,20 @@
1654                  if grouped is not None:
1655                      for vref in sortterm.iget_nodes(VariableRef):
1656                          if not vref.name in grouped:
1657                              sql[-1] += ', ' + self.inputmap[vref.name]
1658                              grouped.add(vref.name)
1659 -            sql.append('ORDER BY %s' % ', '.join(clause))
1660 -        if self.limit:
1661 -            sql.append('LIMIT %s' % self.limit)
1662 -        if self.offset:
1663 -            sql.append('OFFSET %s' % self.offset)
1664 -        return ' '.join(sql)
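+            # delegate ORDER BY (and the need for LIMIT/OFFSET) to the backend's
+            # db helper, since the SQL syntax differs across databases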
1665 +            sql = dbhelper.sql_add_order_by(' '.join(sql),
1666 +                                            clause,
1667 +                                            None, False,
1668 +                                            self.limit or self.offset)
1669 +        else:
1670 +            sql = ' '.join(sql)
1671 +            clause = None
1672 +
1673 +        sql = dbhelper.sql_add_limit_offset(sql, self.limit, self.offset, clause)
1674 +        return sql
1675 
1676      def visit_function(self, function):
1677          """generate SQL name for a function"""
1678          try:
1679              return self.children[0].outputmap[str(function)]
diff --git a/server/querier.py b/server/querier.py
@@ -745,9 +745,13 @@
1680              # FIXME: get number of affected entities / relations on non
1681              # selection queries ?
1682          # return a result set object
1683          return ResultSet(results, rql, args, descr, orig_rqlst)
1684 
1685 +    # these are overridden by set_log_methods below
1686 +    # only defining here to prevent pylint from complaining
1687 +    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
1688 +
1689  from logging import getLogger
1690  from cubicweb import set_log_methods
1691  LOGGER = getLogger('cubicweb.querier')
1692  set_log_methods(QuerierHelper, LOGGER)
diff --git a/server/repository.py b/server/repository.py
@@ -1456,10 +1456,13 @@
1693      def is_multi_sources_relation(self, rtype):
1694          return any(source for source in self.sources
1695                     if not source is self.system_source
1696                     and source.support_relation(rtype))
1697 
1698 +    # these are overridden by set_log_methods below
1699 +    # only defining here to prevent pylint from complaining
1700 +    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
1701 
1702  def pyro_unregister(config):
1703      """unregister the repository from the pyro name server"""
1704      from logilab.common.pyro_ext import ns_unregister
1705      appid = config['pyro-instance-id'] or config.appid
diff --git a/server/session.py b/server/session.py
@@ -1030,10 +1030,14 @@
1706      @deprecated('[3.4] use entity_from_eid(eid, etype=None)')
1707      def entity(self, eid):
1708          """return a result set for the given eid"""
1709          return self.entity_from_eid(eid)
1710 
1711 +    # these are overridden by set_log_methods below
1712 +    # only defining here to prevent pylint from complaining
1713 +    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
1714 +
1715 
1716  class InternalSession(Session):
1717      """special session created internaly by the repository"""
1718      is_internal_session = True
1719      running_dbapi_query = False
diff --git a/server/sources/rql2sql.py b/server/sources/rql2sql.py
@@ -618,28 +618,34 @@
1720                  sql += '\nGROUP BY %s' % groups
1721              if having:
1722                  sql += '\nHAVING %s' % having
1723              # sort
1724              if sorts:
1725 -                sqlsortterms = [self._sortterm_sql(sortterm, fselectidx)
1726 -                                for sortterm in sorts]
1727 -                sqlsortterms = [x for x in sqlsortterms if x is not None]
1728 +                sqlsortterms = []
1729 +                for sortterm in sorts:
1730 +                    _term = self._sortterm_sql(sortterm, fselectidx)
1731 +                    if _term is not None:
1732 +                        sqlsortterms.append(_term)
1733                  if sqlsortterms:
1734 -                    sql += '\nORDER BY %s' % ','.join(sqlsortterms)
1735 -                    if sorts and fneedwrap:
1736 -                        selection = ['T1.C%s' % i for i in xrange(len(origselection))]
1737 -                        sql = 'SELECT %s FROM (%s) AS T1' % (','.join(selection), sql)
1738 +                    sql = self.dbhelper.sql_add_order_by(sql, sqlsortterms,
1739 +                                                         origselection,
1740 +                                                         fneedwrap,
1741 +                                                         select.limit or select.offset)
1742 +                    ## sql += '\nORDER BY %s' % ','.join(sqlsortterms)
1743 +                    ## if sorts and fneedwrap:
1744 +                    ##     selection = ['T1.C%s' % i for i in xrange(len(origselection))]
1745 +                    ##     sql = 'SELECT %s FROM (%s) AS T1' % (','.join(selection), sql)
1746 +            else:
1747 +                sqlsortterms = None
1748              state.finalize_source_cbs()
1749          finally:
1750              select.selection = origselection
1751          # limit / offset
1752 -        limit = select.limit
1753 -        if limit:
1754 -            sql += '\nLIMIT %s' % limit
1755 -        offset = select.offset
1756 -        if offset:
1757 -            sql += '\nOFFSET %s' % offset
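+        # delegate LIMIT / OFFSET generation to the db helper; the sort terms
+        # are passed along as some backends may need them to emulate LIMIT/OFFSET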
1758 +        sql = self.dbhelper.sql_add_limit_offset(sql,
1759 +                                                 select.limit,
1760 +                                                 select.offset,
1761 +                                                 sqlsortterms)
1762          return sql
1763 
1764      def _subqueries_sql(self, select, state):
1765          for i, subquery in enumerate(select.with_):
1766              sql = self.union_sql(subquery.query, needalias=True)
diff --git a/server/sqlutils.py b/server/sqlutils.py
@@ -282,10 +282,13 @@
1767                      value = self._binary(value.getvalue())
1768              attrs[SQL_PREFIX+str(attr)] = value
1769          attrs[SQL_PREFIX+'eid'] = entity.eid
1770          return attrs
1771 
1772 +    # these are overridden by set_log_methods below
1773 +    # only defining here to prevent pylint from complaining
1774 +    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
1775 
1776  from logging import getLogger
1777  from cubicweb import set_log_methods
1778  set_log_methods(SQLAdapterMixIn, getLogger('cubicweb.sqladapter'))
1779 
diff --git a/server/test/unittest_ldapuser.py b/server/test/unittest_ldapuser.py
@@ -22,14 +22,15 @@
1780  import time
1781  from os.path import abspath, join, exists
1782  import subprocess
1783  from socket import socket, error as socketerror
1784 
1785 -from logilab.common.testlib import TestCase, unittest_main, mock_object
1786 +from logilab.common.testlib import TestCase, unittest_main, mock_object, Tags
1787  from cubicweb.devtools.testlib import CubicWebTC
1788  from cubicweb.devtools.repotest import RQLGeneratorTC
1789  from cubicweb.devtools.httptest import get_available_port
1790 +from cubicweb.devtools import get_test_db_handler
1791 
1792  from cubicweb.server.sources.ldapuser import *
1793 
1794  SYT = 'syt'
1795  SYT_EMAIL = 'Sylvain Thenault'
@@ -62,30 +63,14 @@
1796      # don't check upassword !
1797      return self.extid2eid(user['dn'], 'CWUser', session)
1798 
1799  def setUpModule(*args):
1800      create_slapd_configuration(LDAPUserSourceTC.config)
1801 -    global repo
1802 -    try:
1803 -        LDAPUserSourceTC._init_repo()
1804 -        repo = LDAPUserSourceTC.repo
1805 -        add_ldap_source(LDAPUserSourceTC.cnx)
1806 -    except:
1807 -        terminate_slapd()
1808 -        raise
1809 
1810  def tearDownModule(*args):
1811 -    global repo
1812 -    repo.shutdown()
1813 -    del repo
1814      terminate_slapd()
1815 
1816 -def add_ldap_source(cnx):
1817 -    cnx.request().create_entity('CWSource', name=u'ldapuser', type=u'ldapuser',
1818 -                                config=CONFIG)
1819 -    cnx.commit()
1820 -
1821  def create_slapd_configuration(config):
1822      global slapd_process, CONFIG
1823      basedir = join(config.apphome, "ldapdb")
1824      slapdconf = join(config.apphome, "slapd.conf")
1825      confin = file(join(config.apphome, "slapd.conf.in")).read()
@@ -125,14 +110,23 @@
1826          else:
1827              import os, signal
1828              os.kill(slapd_process.pid, signal.SIGTERM)
1829          slapd_process.wait()
1830          print "DONE"
1831 -
1832      del slapd_process
1833 
1834  class LDAPUserSourceTC(CubicWebTC):
1835 +    test_db_id = 'ldap-user'
1836 +    tags = CubicWebTC.tags | Tags(('ldap'))
1837 +
1838 +    @classmethod
1839 +    def pre_setup_database(cls, session, config):
1840 +        session.create_entity('CWSource', name=u'ldapuser', type=u'ldapuser',
1841 +                                    config=CONFIG)
1842 +        session.commit()
1843 +        # XXX keep it there
1844 +        session.execute('CWUser U')
1845 
1846      def patch_authenticate(self):
1847          self._orig_authenticate = LDAPUserSource.authenticate
1848          LDAPUserSource.authenticate = nopwd_authenticate
1849 
@@ -273,28 +267,31 @@
1850          self.session.set_pool()
1851          self.session.create_entity('CWGroup', name=u'bougloup1')
1852          self.session.create_entity('CWGroup', name=u'bougloup2')
1853          self.sexecute('SET U in_group G WHERE G name ~= "bougloup%", U login "admin"')
1854          self.sexecute('SET U in_group G WHERE G name = "bougloup1", U login %(syt)s', {'syt': SYT})
1855 -        rset = self.sexecute('Any L,SN ORDERBY L WHERE X in_state S, S name SN, X login L, EXISTS(X in_group G, G name ~= "bougloup%")')
1856 +        rset = self.sexecute('Any L,SN ORDERBY L WHERE X in_state S, '
1857 +                             'S name SN, X login L, EXISTS(X in_group G, G name ~= "bougloup%")')
1858          self.assertEqual(rset.rows, [['admin', 'activated'], [SYT, 'activated']])
1859 
1860      def test_exists2(self):
1861          self.create_user('comme')
1862          self.create_user('cochon')
1863          self.sexecute('SET X copain Y WHERE X login "comme", Y login "cochon"')
1864 -        rset = self.sexecute('Any GN ORDERBY GN WHERE X in_group G, G name GN, (G name "managers" OR EXISTS(X copain T, T login in ("comme", "cochon")))')
1865 +        rset = self.sexecute('Any GN ORDERBY GN WHERE X in_group G, G name GN, '
1866 +                             '(G name "managers" OR EXISTS(X copain T, T login in ("comme", "cochon")))')
1867          self.assertEqual(rset.rows, [['managers'], ['users']])
1868 
1869      def test_exists3(self):
1870          self.create_user('comme')
1871          self.create_user('cochon')
1872          self.sexecute('SET X copain Y WHERE X login "comme", Y login "cochon"')
1873          self.failUnless(self.sexecute('Any X, Y WHERE X copain Y, X login "comme", Y login "cochon"'))
1874          self.sexecute('SET X copain Y WHERE X login %(syt)s, Y login "cochon"', {'syt': SYT})
1875          self.failUnless(self.sexecute('Any X, Y WHERE X copain Y, X login %(syt)s, Y login "cochon"', {'syt': SYT}))
1876 -        rset = self.sexecute('Any GN,L WHERE X in_group G, X login L, G name GN, G name "managers" OR EXISTS(X copain T, T login in ("comme", "cochon"))')
1877 +        rset = self.sexecute('Any GN,L WHERE X in_group G, X login L, G name GN, G name "managers" '
1878 +                             'OR EXISTS(X copain T, T login in ("comme", "cochon"))')
1879          self.assertEqual(sorted(rset.rows), [['managers', 'admin'], ['users', 'comme'], ['users', SYT]])
1880 
1881      def test_exists4(self):
1882          self.create_user('comme')
1883          self.create_user('cochon', groups=('users', 'guests'))
@@ -395,12 +392,14 @@
1884          self.sexecute('Any X,AA WHERE X use_email Y, Y eid %(x)s, X modification_date AA',
1885                       {'x': emaileid})
1886 
1887      def test_nonregr5(self):
1888          # original jpl query:
1889 -        # Any X, NOW - CD, P WHERE P is Project, U interested_in P, U is CWUser, U login "sthenault", X concerns P, X creation_date CD ORDERBY CD DESC LIMIT 5
1890 -        rql = 'Any X, NOW - CD, P ORDERBY CD DESC LIMIT 5 WHERE P bookmarked_by U, U login "%s", P is X, X creation_date CD' % self.session.user.login
1891 +        # Any X, NOW - CD, P WHERE P is Project, U interested_in P, U is CWUser,
1892 +        # U login "sthenault", X concerns P, X creation_date CD ORDERBY CD DESC LIMIT 5
1893 +        rql = ('Any X, NOW - CD, P ORDERBY CD DESC LIMIT 5 WHERE P bookmarked_by U, '
1894 +               'U login "%s", P is X, X creation_date CD') % self.session.user.login
1895          self.sexecute(rql, )#{'x': })
1896 
1897      def test_nonregr6(self):
1898          self.sexecute('Any B,U,UL GROUPBY B,U,UL WHERE B created_by U?, B is File '
1899                       'WITH U,UL BEING (Any U,UL WHERE ME eid %(x)s, (EXISTS(U identity ME) '
@@ -443,22 +442,33 @@
1900          res = trfunc.apply([[1, 2], [2, 4], [3, 6], [1, 5]])
1901          self.assertEqual(res, [[1, 5], [2, 4], [3, 6]])
1902 
1903  class RQL2LDAPFilterTC(RQLGeneratorTC):
1904 
1905 +    tags = RQLGeneratorTC.tags | Tags(('ldap'))
1906 +
1907 +    @property
1908 +    def schema(self):
1909 +        """return the application schema"""
1910 +        return self._schema
1911 +
1912      def setUp(self):
1913 -        self.schema = repo.schema
1914 -        RQLGeneratorTC.setUp(self)
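+        # build (or reuse) the cached 'ldap-user' test database built for LDAPUserSourceTC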
1915 +        self.handler = get_test_db_handler(LDAPUserSourceTC.config)
1916 +        self.handler.build_db_cache('ldap-user', LDAPUserSourceTC.pre_setup_database)
1917 +        self.handler.restore_database('ldap-user')
1918 +        self._repo = repo = self.handler.get_repo()
1919 +        self._schema = repo.schema
1920 +        super(RQL2LDAPFilterTC, self).setUp()
1921          ldapsource = repo.sources[-1]
1922          self.pool = repo._get_pool()
1923          session = mock_object(pool=self.pool)
1924          self.o = RQL2LDAPFilter(ldapsource, session)
1925          self.ldapclasses = ''.join(ldapsource.base_filters)
1926 
1927      def tearDown(self):
1928 -        repo._free_pool(self.pool)
1929 -        RQLGeneratorTC.tearDown(self)
1930 +        self._repo.turn_repo_off()
1931 +        super(RQL2LDAPFilterTC, self).tearDown()
1932 
1933      def test_base(self):
1934          rqlst = self._prepare('CWUser X WHERE X login "toto"').children[0]
1935          self.assertEqual(self.o.generate(rqlst, 'X')[1],
1936                            '(&%s(uid=toto))' % self.ldapclasses)
diff --git a/server/test/unittest_migractions.py b/server/test/unittest_migractions.py
@@ -21,11 +21,11 @@
1937 
1938  from copy import deepcopy
1939  from datetime import date
1940  from os.path import join
1941 
1942 -from logilab.common.testlib import TestCase, unittest_main
1943 +from logilab.common.testlib import TestCase, unittest_main, Tags, tag
1944 
1945  from yams.constraints import UniqueConstraint
1946 
1947  from cubicweb import ConfigurationError, ValidationError
1948  from cubicweb.devtools.testlib import CubicWebTC
@@ -35,18 +35,22 @@
1949 
1950  migrschema = None
1951  def tearDownModule(*args):
1952      global migrschema
1953      del migrschema
1954 -    del MigrationCommandsTC.origschema
1955 +    if hasattr(MigrationCommandsTC, 'origschema'):
1956 +        del MigrationCommandsTC.origschema
1957 
1958  class MigrationCommandsTC(CubicWebTC):
1959 
1960 +    tags = CubicWebTC.tags | Tags(('server', 'migration', 'migractions'))
1961 +
1962      @classmethod
1963 -    def init_config(cls, config):
1964 -        super(MigrationCommandsTC, cls).init_config(config)
1965 +    def _init_repo(cls):
1966 +        super(MigrationCommandsTC, cls)._init_repo()
1967          # we have to read schema from the database to get eid for schema entities
1968 +        config = cls.config
1969          config._cubes = None
1970          cls.repo.fill_schema()
1971          cls.origschema = deepcopy(cls.repo.schema)
1972          # hack to read the schema from data/migrschema
1973          config.appid = join('data', 'migratedapp')
@@ -55,22 +59,10 @@
1974          migrschema = config.load_schema()
1975          config.appid = 'data'
1976          config._apphome = cls.datadir
1977          assert 'Folder' in migrschema
1978 
1979 -    @classmethod
1980 -    def _refresh_repo(cls):
1981 -        super(MigrationCommandsTC, cls)._refresh_repo()
1982 -        cls.repo.set_schema(deepcopy(cls.origschema), resetvreg=False)
1983 -        # reset migration schema eids
1984 -        for eschema in migrschema.entities():
1985 -            eschema.eid = None
1986 -        for rschema in migrschema.relations():
1987 -            rschema.eid = None
1988 -            for rdef in rschema.rdefs.values():
1989 -                rdef.eid = None
1990 -
1991      def setUp(self):
1992          CubicWebTC.setUp(self)
1993          self.mh = ServerMigrationHelper(self.repo.config, migrschema,
1994                                          repo=self.repo, cnx=self.cnx,
1995                                          interactive=False)
@@ -341,10 +333,11 @@
1996              self.assertEqual(card, True)
1997          finally:
1998              self.mh.cmd_change_relation_props('Personne', 'adel', 'String',
1999                                                fulltextindexed=False)
2000 
2001 +    @tag('longrun')
2002      def test_sync_schema_props_perms(self):
2003          cursor = self.mh.session
2004          cursor.set_pool()
2005          nbrqlexpr_start = cursor.execute('Any COUNT(X) WHERE X is RQLExpression')[0][0]
2006          migrschema['titre'].rdefs[('Personne', 'String')].order = 7
@@ -462,10 +455,11 @@
2007          try:
2008              self.mh.cmd_set_size_constraint('CWEType', 'description', 256)
2009          finally:
2010              self.mh.cmd_set_size_constraint('CWEType', 'description', None)
2011 
2012 +    @tag('longrun')
2013      def test_add_remove_cube_and_deps(self):
2014          cubes = set(self.config.cubes())
2015          schema = self.repo.schema
2016          self.assertEqual(sorted((str(s), str(o)) for s, o in schema['see_also'].rdefs.keys()),
2017                            sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'),
@@ -525,10 +519,11 @@
2018              # why this commit is necessary is unclear to me (though without it
2019              # next test may fail complaining of missing tables
2020              self.commit()
2021 
2022 
2023 +    @tag('longrun')
2024      def test_add_remove_cube_no_deps(self):
2025          cubes = set(self.config.cubes())
2026          schema = self.repo.schema
2027          try:
2028              try:
@@ -556,10 +551,11 @@
2029      def test_remove_dep_cube(self):
2030          with self.assertRaises(ConfigurationError) as cm:
2031              self.mh.cmd_remove_cube('file')
2032          self.assertEqual(str(cm.exception), "can't remove cube file, used as a dependency")
2033 
2034 +    @tag('longrun')
2035      def test_introduce_base_class(self):
2036          self.mh.cmd_add_entity_type('Para')
2037          self.mh.repo.schema.rebuild_infered_relations()
2038          self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()),
2039                            ['Note'])
diff --git a/server/test/unittest_msplanner.py b/server/test/unittest_msplanner.py
@@ -428,11 +428,11 @@
2040 
2041      def test_simple_system_ldap_ordered_limit_offset(self):
2042          """retrieve CWUser X from both sources and return concatenation of results
2043          """
2044          self._test('CWUser X ORDERBY X LIMIT 10 OFFSET 10',
2045 -                   [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY table0.C0 LIMIT 10 OFFSET 10', None, [
2046 +                   [('AggrStep', 'SELECT table0.C0 FROM table0\nORDER BY table0.C0\nLIMIT 10\nOFFSET 10', None, [
2047                         ('FetchStep', [('Any X WHERE X is CWUser', [{'X': 'CWUser'}])],
2048                          [self.ldap, self.system], {}, {'X': 'table0.C0'}, []),
2049                         ]),
2050                     ])
2051      def test_simple_system_ldap_aggregat(self):
@@ -513,20 +513,20 @@
2052                       [self.system], {'X': 'table0.C0', 'X.login': 'table0.C1', 'L': 'table0.C1'}, [])
2053                      ])
2054 
2055      def test_complex_ordered(self):
2056          self._test('Any L ORDERBY L WHERE X login L',
2057 -                   [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY table0.C0', None,
2058 +                   [('AggrStep', 'SELECT table0.C0 FROM table0\nORDER BY table0.C0', None,
2059                       [('FetchStep', [('Any L WHERE X login L, X is CWUser',
2060                                        [{'X': 'CWUser', 'L': 'String'}])],
2061                         [self.ldap, self.system], {}, {'X.login': 'table0.C0', 'L': 'table0.C0'}, []),
2062                        ])
2063                      ])
2064 
2065      def test_complex_ordered_limit_offset(self):
2066          self._test('Any L ORDERBY L LIMIT 10 OFFSET 10 WHERE X login L',
2067 -                   [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY table0.C0 LIMIT 10 OFFSET 10', None,
2068 +                   [('AggrStep', 'SELECT table0.C0 FROM table0\nORDER BY table0.C0\nLIMIT 10\nOFFSET 10', None,
2069                       [('FetchStep', [('Any L WHERE X login L, X is CWUser',
2070                                        [{'X': 'CWUser', 'L': 'String'}])],
2071                         [self.ldap, self.system], {}, {'X.login': 'table0.C0', 'L': 'table0.C0'}, []),
2072                        ])
2073                      ])
@@ -608,11 +608,11 @@
2074          1. retrieve CWUser X from system and ldap sources, Person X from system source only, store
2075             each result in the same temp table
2076          2. return content of the table sorted
2077          """
2078          self._test('Any X,F ORDERBY F WHERE X firstname F',
2079 -                   [('AggrStep', 'SELECT table0.C0, table0.C1 FROM table0 ORDER BY table0.C1', None,
2080 +                   [('AggrStep', 'SELECT table0.C0, table0.C1 FROM table0\nORDER BY table0.C1', None,
2081                       [('FetchStep', [('Any X,F WHERE X firstname F, X is CWUser',
2082                                        [{'X': 'CWUser', 'F': 'String'}])],
2083                         [self.ldap, self.system], {},
2084                         {'X': 'table0.C0', 'X.firstname': 'table0.C1', 'F': 'table0.C1'}, []),
2085                        ('FetchStep', [('Any X,F WHERE X firstname F, X is Personne',
@@ -1342,11 +1342,11 @@
2086 
2087      def test_has_text_orderby_rank(self):
2088          self._test('Any X ORDERBY FTIRANK(X) WHERE X has_text "bla", X firstname "bla"',
2089                     [('FetchStep', [('Any X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])],
2090                       [self.ldap, self.system], None, {'X': 'table0.C0'}, []),
2091 -                    ('AggrStep', 'SELECT table1.C1 FROM table1 ORDER BY table1.C0', None, [
2092 +                    ('AggrStep', 'SELECT table1.C1 FROM table1\nORDER BY table1.C0', None, [
2093                          ('FetchStep', [('Any FTIRANK(X),X WHERE X has_text "bla", X is CWUser',
2094                                          [{'X': 'CWUser'}])],
2095                           [self.system], {'X': 'table0.C0'}, {'FTIRANK(X)': 'table1.C0', 'X': 'table1.C1'}, []),
2096                          ('FetchStep', [('Any FTIRANK(X),X WHERE X has_text "bla", X firstname "bla", X is Personne',
2097                                          [{'X': 'Personne'}])],
@@ -1399,20 +1399,20 @@
2098                          ]),
2099                      ])
2100 
2101      def test_sort_func(self):
2102          self._test('Note X ORDERBY DUMB_SORT(RF) WHERE X type RF',
2103 -                   [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY DUMB_SORT(table0.C1)', None, [
2104 +                   [('AggrStep', 'SELECT table0.C0 FROM table0\nORDER BY DUMB_SORT(table0.C1)', None, [
2105                         ('FetchStep', [('Any X,RF WHERE X type RF, X is Note',
2106                                         [{'X': 'Note', 'RF': 'String'}])],
2107                          [self.cards, self.system], {}, {'X': 'table0.C0', 'X.type': 'table0.C1', 'RF': 'table0.C1'}, []),
2108                         ])
2109                      ])
2110 
2111      def test_ambigous_sort_func(self):
2112          self._test('Any X ORDERBY DUMB_SORT(RF) WHERE X title RF, X is IN (Bookmark, Card, EmailThread)',
2113 -                   [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY DUMB_SORT(table0.C1)', None,
2114 +                   [('AggrStep', 'SELECT table0.C0 FROM table0\nORDER BY DUMB_SORT(table0.C1)', None,
2115                       [('FetchStep', [('Any X,RF WHERE X title RF, X is Card',
2116                                        [{'X': 'Card', 'RF': 'String'}])],
2117                         [self.cards, self.system], {},
2118                         {'X': 'table0.C0', 'X.title': 'table0.C1', 'RF': 'table0.C1'}, []),
2119                        ('FetchStep', [('Any X,RF WHERE X title RF, X is IN(Bookmark, EmailThread)',
@@ -1895,11 +1895,11 @@
2120          self.cards.support_relations['see_also'] = True
2121          self.cards.cross_relations.add('see_also')
2122          try:
2123              self._test('Any X,AA ORDERBY AA WHERE E eid %(x)s, E see_also X, X modification_date AA',
2124                         [('AggrStep',
2125 -                         'SELECT table0.C0, table0.C1 FROM table0 ORDER BY table0.C1',
2126 +                         'SELECT table0.C0, table0.C1 FROM table0\nORDER BY table0.C1',
2127                           None,
2128                           [('FetchStep',
2129                             [('Any X,AA WHERE 999999 see_also X, X modification_date AA, X is Note',
2130                               [{'AA': 'Datetime', 'X': 'Note'}])], [self.cards, self.system], {},
2131                             {'AA': 'table0.C1', 'X': 'table0.C0',
@@ -2069,11 +2069,11 @@
2132          self.cards.support_relations['see_also'] = True
2133          self.cards.cross_relations.add('see_also')
2134          try:
2135              self._test('Any X,AA ORDERBY AA WHERE E eid %(x)s, E see_also X, X modification_date AA',
2136                         [('AggrStep',
2137 -                         'SELECT table0.C0, table0.C1 FROM table0 ORDER BY table0.C1',
2138 +                         'SELECT table0.C0, table0.C1 FROM table0\nORDER BY table0.C1',
2139                           None,
2140                           [('FetchStep',
2141                             [('Any X,AA WHERE 999999 see_also X, X modification_date AA, X is Note',
2142                               [{'AA': 'Datetime', 'X': 'Note'}])], [self.cards, self.system], {},
2143                             {'AA': 'table0.C1', 'X': 'table0.C0',
@@ -2116,11 +2116,11 @@
2144                                      [{'X': 'Note', 'D': 'Datetime'}])],
2145                       [self.cards, self.system], None, {'X': 'table0.C0', 'X.modification_date': 'table0.C1', 'D': 'table0.C1'}, []),
2146                      ('FetchStep', [('Any X,D WHERE X modification_date D, X is CWUser',
2147                                      [{'X': 'CWUser', 'D': 'Datetime'}])],
2148                       [self.ldap, self.system], None, {'X': 'table1.C0', 'X.modification_date': 'table1.C1', 'D': 'table1.C1'}, []),
2149 -                    ('AggrStep', 'SELECT table2.C0 FROM table2 ORDER BY table2.C1 DESC', None, [
2150 +                    ('AggrStep', 'SELECT table2.C0 FROM table2\nORDER BY table2.C1 DESC', None, [
2151                          ('FetchStep', [('Any X,D WHERE E eid %s, E wf_info_for X, X modification_date D, E is TrInfo, X is Affaire'%treid,
2152                                          [{'X': 'Affaire', 'E': 'TrInfo', 'D': 'Datetime'}])],
2153                           [self.system],
2154                           {},
2155                           {'X': 'table2.C0', 'X.modification_date': 'table2.C1', 'D': 'table2.C1', 'E.wf_info_for': 'table2.C0'}, []),
@@ -2265,11 +2265,11 @@
2156          self._test('Any X ORDERBY Z DESC WHERE X modification_date Z, E eid %(x)s, E see_also X',
2157                     [('FetchStep', [('Any X,Z WHERE X modification_date Z, X is Note',
2158                                      [{'X': 'Note', 'Z': 'Datetime'}])],
2159                       [self.cards, self.system], None, {'X': 'table0.C0', 'X.modification_date': 'table0.C1', 'Z': 'table0.C1'},
2160                       []),
2161 -                    ('AggrStep', 'SELECT table1.C0 FROM table1 ORDER BY table1.C1 DESC', None,
2162 +                    ('AggrStep', 'SELECT table1.C0 FROM table1\nORDER BY table1.C1 DESC', None,
2163                       [('FetchStep', [('Any X,Z WHERE X modification_date Z, 999999 see_also X, X is Bookmark',
2164                                        [{'X': 'Bookmark', 'Z': 'Datetime'}])],
2165                         [self.system], {},   {'X': 'table1.C0', 'X.modification_date': 'table1.C1',
2166                                               'Z': 'table1.C1'},
2167                         []),
diff --git a/server/test/unittest_multisources.py b/server/test/unittest_multisources.py
@@ -18,13 +18,13 @@
2168 
2169  from datetime import datetime, timedelta
2170  from itertools import repeat
2171 
2172  from cubicweb.devtools import TestServerConfiguration, init_test_database
2173 -from cubicweb.devtools.testlib import CubicWebTC, refresh_repo
2174 +from cubicweb.devtools.testlib import CubicWebTC, Tags
2175  from cubicweb.devtools.repotest import do_monkey_patch, undo_monkey_patch
2176 -
2177 +from cubicweb.devtools import get_test_db_handler
2178 
2179  class ExternalSource1Configuration(TestServerConfiguration):
2180      sourcefile = 'sources_extern'
2181 
2182  class ExternalSource2Configuration(TestServerConfiguration):
@@ -50,85 +50,101 @@
2183      source.init_mapping(zip(('Card', 'Affaire', 'State',
2184                               'in_state', 'documented_by', 'multisource_inlined_rel'),
2185                              repeat(u'write')))
2186 
2187 
2188 -def setUpModule(*args):
2189 -    global repo2, cnx2, repo3, cnx3
2190 -    cfg1 = ExternalSource1Configuration('data', apphome=TwoSourcesTC.datadir)
2191 -    repo2, cnx2 = init_test_database(config=cfg1)
2192 -    cfg2 = ExternalSource2Configuration('data', apphome=TwoSourcesTC.datadir)
2193 -    repo3, cnx3 = init_test_database(config=cfg2)
2194 -    src = cnx3.request().create_entity('CWSource', name=u'extern',
2195 -                                       type=u'pyrorql', config=EXTERN_SOURCE_CFG)
2196 -    cnx3.commit() # must commit before adding the mapping
2197 -    add_extern_mapping(src)
2198 -    cnx3.commit()
2199 +def pre_setup_database_extern(session, config):
2200 +    session.execute('INSERT Card X: X title "C3: An external card", X wikiid "aaa"')
2201 +    session.execute('INSERT Card X: X title "C4: Ze external card", X wikiid "zzz"')
2202 +    session.execute('INSERT Affaire X: X ref "AFFREF"')
2203 +    session.commit()
2204 
2205 -    TestServerConfiguration.no_sqlite_wrap = True
2206 -    # hi-jack PyroRQLSource.get_connection to access existing connection (no
2207 -    # pyro connection)
2208 -    PyroRQLSource.get_connection = lambda x: x.uri == 'extern-multi' and cnx3 or cnx2
2209 -    # also necessary since the repository is closing its initial connections
2210 -    # pool though we want to keep cnx2 valid
2211 -    Connection.close = lambda x: None
2212 -
2213 -def tearDownModule(*args):
2214 -    PyroRQLSource.get_connection = PyroRQLSource_get_connection
2215 -    Connection.close = Connection_close
2216 -    global repo2, cnx2, repo3, cnx3
2217 -    repo2.shutdown()
2218 -    repo3.shutdown()
2219 -    del repo2, cnx2, repo3, cnx3
2220 -    #del TwoSourcesTC.config.vreg
2221 -    #del TwoSourcesTC.config
2222 -    TestServerConfiguration.no_sqlite_wrap = False
2223 +def pre_setup_database_multi(session, config):
2224 +    session.create_entity('CWSource', name=u'extern', type=u'pyrorql',
2225 +                                 config=EXTERN_SOURCE_CFG)
2226 +    session.commit()
2227 
2228  class TwoSourcesTC(CubicWebTC):
2229      """Main repo -> extern-multi -> extern
2230                    \-------------/
2231      """
2232 +    test_db_id = 'cw-server-multisources'
2233 +    tags = CubicWebTC.tags | Tags(('multisources'))
2234 +
2235      @classmethod
2236 -    def _refresh_repo(cls):
2237 -        super(TwoSourcesTC, cls)._refresh_repo()
2238 -        cnx2.rollback()
2239 -        refresh_repo(repo2)
2240 -        cnx3.rollback()
2241 -        refresh_repo(repo3)
2242 +    def setUpClass(cls):
2243 +        cls._cfg2 = ExternalSource1Configuration('data', apphome=TwoSourcesTC.datadir)
2244 +        cls._cfg3 = ExternalSource2Configuration('data', apphome=TwoSourcesTC.datadir)
2245 +        TestServerConfiguration.no_sqlite_wrap = True
2246 +        # hi-jack PyroRQLSource.get_connection to access existing connection (no
2247 +        # pyro connection)
2248 +        PyroRQLSource.get_connection = lambda x: x.uri == 'extern-multi' and cls.cnx3 or cls.cnx2
2249 +        # also necessary since the repository is closing its initial connections
2250 +        # pool though we want to keep cnx2 valid
2251 +        Connection.close = lambda x: None
2252 +
2253 +    @classmethod
2254 +    def tearDownClass(cls):
2255 +        PyroRQLSource.get_connection = PyroRQLSource_get_connection
2256 +        Connection.close = Connection_close
2257 +        cls.cnx2.close()
2258 +        cls.cnx3.close()
2259 +        TestServerConfiguration.no_sqlite_wrap = False
2260 +
2261 +
2262 +    @classmethod
2263 +    def _init_repo(cls):
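+        # build (or reuse) the cached databases for the two external source repositories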
2264 +        repo2_handler = get_test_db_handler(cls._cfg2)
2265 +        repo2_handler.build_db_cache('4cards-1affaire', pre_setup_func=pre_setup_database_extern)
2266 +        cls.repo2, cls.cnx2 = repo2_handler.get_repo_and_cnx('4cards-1affaire')
2267 +
2268 +        repo3_handler = get_test_db_handler(cls._cfg3)
2269 +        repo3_handler.build_db_cache('multisource', pre_setup_func=pre_setup_database_multi)
2270 +        cls.repo3, cls.cnx3 = repo3_handler.get_repo_and_cnx('multisource')
2271 +
2272 +
2273 +        super(TwoSourcesTC, cls)._init_repo()
2274 
2275      def setUp(self):
2276          CubicWebTC.setUp(self)
2277 +        self.addCleanup(self.cnx2.close)
2278 +        self.addCleanup(self.cnx3.close)
2279          do_monkey_patch()
2280 
2281      def tearDown(self):
2282          for source in self.repo.sources[1:]:
2283              self.repo.remove_source(source.uri)
2284          CubicWebTC.tearDown(self)
2285 +        self.cnx2.close()
2286 +        self.cnx3.close()
2287          undo_monkey_patch()
2288 
2289 -    def setup_database(self):
2290 -        cu = cnx2.cursor()
2291 -        self.ec1 = cu.execute('INSERT Card X: X title "C3: An external card", X wikiid "aaa"')[0][0]
2292 -        cu.execute('INSERT Card X: X title "C4: Ze external card", X wikiid "zzz"')
2293 -        self.aff1 = cu.execute('INSERT Affaire X: X ref "AFFREF"')[0][0]
2294 -        cnx2.commit()
2295 -        for uri, config in [('extern', EXTERN_SOURCE_CFG),
2296 +    @staticmethod
2297 +    def pre_setup_database(session, config):
2298 +        for uri, src_config in [('extern', EXTERN_SOURCE_CFG),
2299                              ('extern-multi', '''
2300  pyro-ns-id = extern-multi
2301  cubicweb-user = admin
2302  cubicweb-password = gingkow
2303  ''')]:
2304 -            source = self.request().create_entity(
2305 -                'CWSource', name=unicode(uri), type=u'pyrorql',
2306 -                config=unicode(config))
2307 -            self.commit() # must commit before adding the mapping
2308 +            source = session.create_entity('CWSource', name=unicode(uri),
2309 +                                           type=u'pyrorql',
2310 +                                           config=unicode(src_config))
2311 +            session.commit()
2312              add_extern_mapping(source)
2313 -        self.commit()
2314 +
2315 +        session.commit()
2316          # trigger discovery
2317 -        self.sexecute('Card X')
2318 -        self.sexecute('Affaire X')
2319 -        self.sexecute('State X')
2320 +        session.execute('Card X')
2321 +        session.execute('Affaire X')
2322 +        session.execute('State X')
2323 +
2324 +    def setup_database(self):
2325 +        cu2 = self.cnx2.cursor()
2326 +        self.ec1 = cu2.execute('Any X WHERE X is Card, X title "C3: An external card", X wikiid "aaa"')[0][0]
2327 +        self.aff1 = cu2.execute('Any X WHERE X is Affaire, X ref "AFFREF"')[0][0]
2328 +        cu2.close()
2329          # add some entities
2330          self.ic1 = self.sexecute('INSERT Card X: X title "C1: An internal card", X wikiid "aaai"')[0][0]
2331          self.ic2 = self.sexecute('INSERT Card X: X title "C2: Ze internal card", X wikiid "zzzi"')[0][0]
2332 
2333      def test_eid_comp(self):
@@ -184,29 +200,29 @@
2334          rset = cu.execute('Any X ORDERBY FTIRANK(X) WHERE X has_text "card"')
2335          self.assertEqual(len(rset), 5, zip(rset.rows, rset.description))
2336          Connection_close(cnx.cnx) # cnx is a TestCaseConnectionProxy
2337 
2338      def test_synchronization(self):
2339 -        cu = cnx2.cursor()
2340 +        cu = self.cnx2.cursor()
2341          assert cu.execute('Any X WHERE X eid %(x)s', {'x': self.aff1})
2342          cu.execute('SET X ref "BLAH" WHERE X eid %(x)s', {'x': self.aff1})
2343          aff2 = cu.execute('INSERT Affaire X: X ref "AFFREUX"')[0][0]
2344 -        cnx2.commit()
2345 +        self.cnx2.commit()
2346          try:
2347              # force sync
2348              self.repo.sources_by_uri['extern'].synchronize(MTIME)
2349              self.failUnless(self.sexecute('Any X WHERE X has_text "blah"'))
2350              self.failUnless(self.sexecute('Any X WHERE X has_text "affreux"'))
2351              cu.execute('DELETE Affaire X WHERE X eid %(x)s', {'x': aff2})
2352 -            cnx2.commit()
2353 +            self.cnx2.commit()
2354              self.repo.sources_by_uri['extern'].synchronize(MTIME)
2355              rset = self.sexecute('Any X WHERE X has_text "affreux"')
2356              self.failIf(rset)
2357          finally:
2358              # restore state
2359              cu.execute('SET X ref "AFFREF" WHERE X eid %(x)s', {'x': self.aff1})
2360 -            cnx2.commit()
2361 +            self.cnx2.commit()
2362 
2363      def test_simplifiable_var(self):
2364          affeid = self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0]
2365          rset = self.sexecute('Any X,AA,AB WHERE E eid %(x)s, E in_state X, X name AA, X modification_date AB',
2366                              {'x': affeid})
@@ -232,13 +248,13 @@
2367          self.assertEqual(sorted(r[0] for r in rset.rows), sorted([iec1, self.ic1]))
2368 
2369      def test_greater_eid(self):
2370          rset = self.sexecute('Any X WHERE X eid > %s' % (self.ic1 - 1))
2371          self.assertEqual(len(rset.rows), 2) # self.ic1 and self.ic2
2372 -        cu = cnx2.cursor()
2373 +        cu = self.cnx2.cursor()
2374          ec2 = cu.execute('INSERT Card X: X title "glup"')[0][0]
2375 -        cnx2.commit()
2376 +        self.cnx2.commit()
2377          # 'X eid > something' should not trigger discovery
2378          rset = self.sexecute('Any X WHERE X eid > %s' % (self.ic1 - 1))
2379          self.assertEqual(len(rset.rows), 2)
2380          # trigger discovery using another query
2381          crset = self.sexecute('Card X WHERE X title "glup"')
@@ -254,20 +270,20 @@
2382          n2 = self.sexecute('INSERT Note X: X type "AFFREU"')[0][0]
2383          rset = self.sexecute('Any X,Y WHERE X is Note, Y is Affaire, X type T, Y ref T')
2384          self.assertEqual(len(rset), 1, rset.rows)
2385 
2386      def test_attr_unification_2(self):
2387 -        cu = cnx2.cursor()
2388 +        cu = self.cnx2.cursor()
2389          ec2 = cu.execute('INSERT Card X: X title "AFFREF"')[0][0]
2390 -        cnx2.commit()
2391 +        self.cnx2.commit()
2392          try:
2393              c1 = self.sexecute('INSERT Card C: C title "AFFREF"')[0][0]
2394              rset = self.sexecute('Any X,Y WHERE X is Card, Y is Affaire, X title T, Y ref T')
2395              self.assertEqual(len(rset), 2, rset.rows)
2396          finally:
2397              cu.execute('DELETE Card X WHERE X eid %(x)s', {'x': ec2})
2398 -            cnx2.commit()
2399 +            self.cnx2.commit()
2400 
2401      def test_attr_unification_neq_1(self):
2402          # XXX complete
2403          self.sexecute('Any X,Y WHERE X is Note, Y is Affaire, X creation_date D, Y creation_date > D')
2404 
@@ -315,26 +331,26 @@
2405          notstates = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN, NOT X in_state S, X eid %(x)s',
2406                                                         {'x': aff1}))
2407          self.assertSetEqual(notstates, states)
2408 
2409      def test_absolute_url_base_url(self):
2410 -        cu = cnx2.cursor()
2411 +        cu = self.cnx2.cursor()
2412          ceid = cu.execute('INSERT Card X: X title "without wikiid to get eid based url"')[0][0]
2413 -        cnx2.commit()
2414 +        self.cnx2.commit()
2415          lc = self.sexecute('Card X WHERE X title "without wikiid to get eid based url"').get_entity(0, 0)
2416          self.assertEqual(lc.absolute_url(), 'http://extern.org/card/eid/%s' % ceid)
2417          cu.execute('DELETE Card X WHERE X eid %(x)s', {'x':ceid})
2418 -        cnx2.commit()
2419 +        self.cnx2.commit()
2420 
2421      def test_absolute_url_no_base_url(self):
2422 -        cu = cnx3.cursor()
2423 +        cu = self.cnx3.cursor()
2424          ceid = cu.execute('INSERT Card X: X title "without wikiid to get eid based url"')[0][0]
2425 -        cnx3.commit()
2426 +        self.cnx3.commit()
2427          lc = self.sexecute('Card X WHERE X title "without wikiid to get eid based url"').get_entity(0, 0)
2428          self.assertEqual(lc.absolute_url(), 'http://testing.fr/cubicweb/card/eid/%s' % lc.eid)
2429          cu.execute('DELETE Card X WHERE X eid %(x)s', {'x':ceid})
2430 -        cnx3.commit()
2431 +        self.cnx3.commit()
2432 
2433      def test_crossed_relation_noeid_needattr(self):
2434          """http://www.cubicweb.org/ticket/1382452"""
2435          aff1 = self.sexecute('INSERT Affaire X: X ref "AFFREF"')[0][0]
2436          # link within extern source
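
Note: the unittest_multisources.py hunks above move the multi-source tests onto devtools' cached test-database handler (get_test_db_handler / build_db_cache / get_repo_and_cnx), with setup that must run before the snapshot going into a pre_setup function and connection teardown handled through addCleanup. A minimal sketch of that pattern outside a test class, assuming only the devtools API visible in this changeset; the 'seeded' cache key and the apphome path are hypothetical:

    from cubicweb.devtools import TestServerConfiguration, get_test_db_handler

    def pre_setup(session, config):
        # runs once, before the database state is cached under the given key
        session.execute('INSERT Card X: X title "seed", X wikiid "seed"')
        session.commit()

    config = TestServerConfiguration(apphome='path/to/test/datadir')  # hypothetical path
    handler = get_test_db_handler(config)
    handler.build_db_cache('seeded', pre_setup_func=pre_setup)  # build (or reuse) the cached db
    repo, cnx = handler.get_repo_and_cnx('seeded')              # repo/connection on that cache
    try:
        cu = cnx.cursor()
        print cu.execute('Any X WHERE X is Card').rowcount
    finally:
        cnx.close()
        repo.shutdown()
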
diff --git a/server/test/unittest_querier.py b/server/test/unittest_querier.py
@@ -25,13 +25,13 @@
2437 
2438  from cubicweb import QueryError, Unauthorized, Binary
2439  from cubicweb.server.sqlutils import SQL_PREFIX
2440  from cubicweb.server.utils import crypt_password
2441  from cubicweb.server.sources.native import make_schema
2442 -from cubicweb.devtools import init_test_database
2443 +from cubicweb.devtools import get_test_db_handler, TestServerConfiguration
2444 +
2445  from cubicweb.devtools.repotest import tuplify, BaseQuerierTC
2446 -
2447  from unittest_session import Variable
2448 
2449 
2450  # register priority/severity sorting registered procedure
2451  from rql.utils import register_function, FunctionDescr
@@ -62,11 +62,14 @@
2452                            ('C0 text,C1 integer', {'A': 'table0.C0', 'B': 'table0.C1'}))
2453 
2454 
2455  def setUpModule(*args):
2456      global repo, cnx
2457 -    repo, cnx = init_test_database(apphome=UtilsTC.datadir)
2458 +    config = TestServerConfiguration(apphome=UtilsTC.datadir)
2459 +    handler = get_test_db_handler(config)
2460 +    handler.build_db_cache()
2461 +    repo, cnx = handler.get_repo_and_cnx()
2462 
2463  def tearDownModule(*args):
2464      global repo, cnx
2465      cnx.close()
2466      repo.shutdown()
@@ -744,11 +747,11 @@
2467          rset = self.execute("Any D WHERE X name in ('bidule', 'toto') , X creation_date D")
2468          self.assert_(isinstance(rset.rows[0][0], datetime), rset.rows)
2469          rset = self.execute('Tag X WHERE X creation_date TODAY')
2470          self.assertEqual(len(rset.rows), 2)
2471          rset = self.execute('Any MAX(D) WHERE X is Tag, X creation_date D')
2472 -        self.failUnless(isinstance(rset[0][0], datetime), type(rset[0][0]))
2473 +        self.failUnless(isinstance(rset[0][0], datetime), (rset[0][0], type(rset[0][0])))
2474 
2475      def test_today(self):
2476          self.execute("INSERT Tag X: X name 'bidule', X creation_date TODAY")
2477          self.execute("INSERT Tag Y: Y name 'toto'")
2478          rset = self.execute('Tag X WHERE X creation_date TODAY')
diff --git a/server/test/unittest_rql2sql.py b/server/test/unittest_rql2sql.py
@@ -16,10 +16,11 @@
2479  # You should have received a copy of the GNU Lesser General Public License along
2480  # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
2481  """unit tests for module cubicweb.server.sources.rql2sql"""
2482 
2483  import sys
2484 +import os
2485 
2486  from logilab.common.testlib import TestCase, unittest_main, mock_object
2487 
2488  from rql import BadRQLQuery
2489  from rql.utils import register_function, FunctionDescr
@@ -35,23 +36,45 @@
2490  try:
2491      register_function(stockproc)
2492  except AssertionError, ex:
2493      pass # already registered
2494 
2495 +from logilab import database as db
2496 +def monkey_patch_import_driver_module(driver, drivers, quiet=True):
2497 +    if not driver in drivers:
2498 +        raise db.UnknownDriver(driver)
2499 +    for modname in drivers[driver]:
2500 +        try:
2501 +            if not quiet:
2502 +                print >> sys.stderr, 'Trying %s' % modname
2503 +            module = db.load_module_from_name(modname, use_sys=False)
2504 +            break
2505 +        except ImportError:
2506 +            if not quiet:
2507 +                print >> sys.stderr, '%s is not available' % modname
2508 +            continue
2509 +    else:
2510 +        return None, drivers[driver][0]
2511 +    return module, modname
2512 +
2513 
2514  def setUpModule():
2515      global config, schema
2516      config = TestServerConfiguration('data', apphome=CWRQLTC.datadir)
2517      config.bootstrap_cubes()
2518      schema = config.load_schema()
2519      schema['in_state'].inlined = True
2520      schema['state_of'].inlined = False
2521      schema['comments'].inlined = False
2522 +    db._backup_import_driver_module = db._import_driver_module
2523 +    db._import_driver_module = monkey_patch_import_driver_module
2524 
2525  def tearDownModule():
2526      global config, schema
2527      del config, schema
2528 +    db._import_driver_module = db._backup_import_driver_module
2529 +    del db._backup_import_driver_module
2530 
2531  PARSER = [
2532      (r"Personne P WHERE P nom 'Zig\'oto';",
2533       '''SELECT _P.cw_eid
2534  FROM cw_Personne AS _P
@@ -91,16 +114,10 @@
2535  WHERE _P.cw_test=FALSE'''),
2536 
2537      ("Personne P WHERE P eid -1",
2538       '''SELECT -1'''),
2539 
2540 -    ("Personne P LIMIT 20 OFFSET 10",
2541 -     '''SELECT _P.cw_eid
2542 -FROM cw_Personne AS _P
2543 -LIMIT 20
2544 -OFFSET 10'''),
2545 -
2546      ("Personne P WHERE S is Societe, P travaille S, S nom 'Logilab';",
2547       '''SELECT rel_travaille0.eid_from
2548  FROM cw_Societe AS _S, travaille_relation AS rel_travaille0
2549  WHERE rel_travaille0.eid_to=_S.cw_eid AND _S.cw_nom=Logilab'''),
2550 
@@ -184,10 +201,18 @@
2551       '''SELECT _X.cw_eid
2552  FROM cw_Card AS _X
2553  WHERE NOT (_X.cw_wikiid=_X.cw_title) AND NOT (_X.cw_title=parent)''')
2554  ]
2555 
2556 +BASIC_WITH_LIMIT = [
2557 +    ("Personne P LIMIT 20 OFFSET 10",
2558 +     '''SELECT _P.cw_eid
2559 +FROM cw_Personne AS _P
2560 +LIMIT 20
2561 +OFFSET 10'''),
2562 +    ]
2563 +
2564 
2565  ADVANCED = [
2566      ("Societe S WHERE S nom 'Logilab' OR S nom 'Caesium'",
2567       '''SELECT _S.cw_eid
2568  FROM cw_Societe AS _S
@@ -277,16 +302,10 @@
2569      ('Any O WHERE NOT S ecrit_par O, S eid 1, S inline1 P, O inline2 P',
2570       '''SELECT _O.cw_eid
2571  FROM cw_Note AS _S, cw_Personne AS _O
2572  WHERE (_S.cw_ecrit_par IS NULL OR _S.cw_ecrit_par!=_O.cw_eid) AND _S.cw_eid=1 AND _S.cw_inline1 IS NOT NULL AND _O.cw_inline2=_S.cw_inline1'''),
2573 
2574 -    ('DISTINCT Any S ORDERBY stockproc(SI) WHERE NOT S ecrit_par O, S para SI',
2575 -     '''SELECT T1.C0 FROM (SELECT DISTINCT _S.cw_eid AS C0, STOCKPROC(_S.cw_para) AS C1
2576 -FROM cw_Note AS _S
2577 -WHERE _S.cw_ecrit_par IS NULL
2578 -ORDER BY 2) AS T1'''),
2579 -
2580      ('Any N WHERE N todo_by U, N is Note, U eid 2, N filed_under T, T eid 3',
2581       # N would actually be invariant if U eid 2 had given a specific type to U
2582       '''SELECT _N.cw_eid
2583  FROM cw_Note AS _N, filed_under_relation AS rel_filed_under1, todo_by_relation AS rel_todo_by0
2584  WHERE rel_todo_by0.eid_from=_N.cw_eid AND rel_todo_by0.eid_to=2 AND rel_filed_under1.eid_from=_N.cw_eid AND rel_filed_under1.eid_to=3'''),
@@ -331,17 +350,10 @@
2585       '''SELECT _X.cw_eid, 32, MAX(rel_tags0.eid_from)
2586  FROM cw_CWUser AS _X, tags_relation AS rel_tags0
2587  WHERE rel_tags0.eid_to=_X.cw_eid AND _X.cw_in_state=32
2588  GROUP BY _X.cw_eid'''),
2589 
2590 -    ('Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 WHERE S is Affaire, C is Societe, S concerne C, C nom CS, (EXISTS(S owned_by 1)) OR (EXISTS(S documented_by N, N title "published"))',
2591 -     '''SELECT COUNT(rel_concerne0.eid_from), _C.cw_nom
2592 -FROM concerne_relation AS rel_concerne0, cw_Societe AS _C
2593 -WHERE rel_concerne0.eid_to=_C.cw_eid AND ((EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by1 WHERE rel_concerne0.eid_from=rel_owned_by1.eid_from AND rel_owned_by1.eid_to=1)) OR (EXISTS(SELECT 1 FROM documented_by_relation AS rel_documented_by2, cw_Card AS _N WHERE rel_concerne0.eid_from=rel_documented_by2.eid_from AND rel_documented_by2.eid_to=_N.cw_eid AND _N.cw_title=published)))
2594 -GROUP BY _C.cw_nom
2595 -ORDER BY 1 DESC
2596 -LIMIT 10'''),
2597 
2598      ('Any X WHERE Y evaluee X, Y is CWUser',
2599       '''SELECT rel_evaluee0.eid_to
2600  FROM cw_CWUser AS _Y, evaluee_relation AS rel_evaluee0
2601  WHERE rel_evaluee0.eid_from=_Y.cw_eid'''),
@@ -433,17 +445,10 @@
2602       '''SELECT (MAX(_X.cw_eid) + MIN(LENGTH(_X.cw_data))), _X.cw_data_name
2603  FROM cw_File AS _X
2604  GROUP BY _X.cw_data_name,_X.cw_data_format
2605  ORDER BY 1,2,_X.cw_data_format'''),
2606 
2607 -    ('DISTINCT Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 2, DF WHERE X data_name N, X data D, X data_format DF;',
2608 -     '''SELECT T1.C0,T1.C1 FROM (SELECT DISTINCT (MAX(_X.cw_eid) + MIN(LENGTH(_X.cw_data))) AS C0, _X.cw_data_name AS C1, _X.cw_data_format AS C2
2609 -FROM cw_File AS _X
2610 -GROUP BY _X.cw_data_name,_X.cw_data_format
2611 -ORDER BY 2,3) AS T1
2612 -'''),
2613 -
2614      # ambiguity in EXISTS() -> should union the sub-query
2615      ('Any T WHERE T is Tag, NOT T name in ("t1", "t2"), EXISTS(T tags X, X is IN (CWUser, CWGroup))',
2616       '''SELECT _T.cw_eid
2617  FROM cw_Tag AS _T
2618  WHERE NOT (_T.cw_name IN(t1, t2)) AND EXISTS(SELECT 1 FROM tags_relation AS rel_tags0, cw_CWGroup AS _X WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid UNION SELECT 1 FROM tags_relation AS rel_tags1, cw_CWUser AS _X WHERE rel_tags1.eid_from=_T.cw_eid AND rel_tags1.eid_to=_X.cw_eid)'''),
@@ -510,10 +515,76 @@
2619  FROM owned_by_relation AS rel_owned_by0
2620  WHERE rel_owned_by0.eid_to=12
2621  GROUP BY rel_owned_by0.eid_to
2622  HAVING COUNT(rel_owned_by0.eid_from)>10'''),
2623 
2624 +
2625 +    ("Any X WHERE X eid 0, X test TRUE",
2626 +     '''SELECT _X.cw_eid
2627 +FROM cw_Personne AS _X
2628 +WHERE _X.cw_eid=0 AND _X.cw_test=TRUE'''),
2629 +
2630 +    ('Any 1 WHERE X in_group G, X is CWUser',
2631 +     '''SELECT 1
2632 +FROM in_group_relation AS rel_in_group0'''),
2633 +
2634 +    ('CWEType X WHERE X name CV, X description V HAVING NOT V=CV AND NOT V = "parent"',
2635 +     '''SELECT _X.cw_eid
2636 +FROM cw_CWEType AS _X
2637 +WHERE NOT (EXISTS(SELECT 1 WHERE _X.cw_description=parent)) AND NOT (EXISTS(SELECT 1 WHERE _X.cw_description=_X.cw_name))'''),
2638 +    ('CWEType X WHERE X name CV, X description V HAVING V!=CV AND V != "parent"',
2639 +     '''SELECT _X.cw_eid
2640 +FROM cw_CWEType AS _X
2641 +WHERE _X.cw_description!=parent AND _X.cw_description!=_X.cw_name'''),
2642 +    ]
2643 +
2644 +ADVANCED_WITH_GROUP_CONCAT = [
2645 +        ("Any X,GROUP_CONCAT(TN) GROUPBY X ORDERBY XN WHERE T tags X, X name XN, T name TN, X is CWGroup",
2646 +     '''SELECT _X.cw_eid, GROUP_CONCAT(_T.cw_name)
2647 +FROM cw_CWGroup AS _X, cw_Tag AS _T, tags_relation AS rel_tags0
2648 +WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid
2649 +GROUP BY _X.cw_eid,_X.cw_name
2650 +ORDER BY _X.cw_name'''),
2651 +
2652 +    ("Any X,GROUP_CONCAT(TN) GROUPBY X ORDERBY XN WHERE T tags X, X name XN, T name TN",
2653 +     '''SELECT T1.C0, GROUP_CONCAT(T1.C1) FROM (SELECT _X.cw_eid AS C0, _T.cw_name AS C1, _X.cw_name AS C2
2654 +FROM cw_CWGroup AS _X, cw_Tag AS _T, tags_relation AS rel_tags0
2655 +WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid
2656 +UNION ALL
2657 +SELECT _X.cw_eid AS C0, _T.cw_name AS C1, _X.cw_name AS C2
2658 +FROM cw_State AS _X, cw_Tag AS _T, tags_relation AS rel_tags0
2659 +WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid
2660 +UNION ALL
2661 +SELECT _X.cw_eid AS C0, _T.cw_name AS C1, _X.cw_name AS C2
2662 +FROM cw_Tag AS _T, cw_Tag AS _X, tags_relation AS rel_tags0
2663 +WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid) AS T1
2664 +GROUP BY T1.C0,T1.C2
2665 +ORDER BY T1.C2'''),
2666 +
2667 +]
2668 +
2669 +ADVANCED_WITH_LIMIT_OR_ORDERBY = [
2670 +    ('Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 WHERE S is Affaire, C is Societe, S concerne C, C nom CS, (EXISTS(S owned_by 1)) OR (EXISTS(S documented_by N, N title "published"))',
2671 +     '''SELECT COUNT(rel_concerne0.eid_from), _C.cw_nom
2672 +FROM concerne_relation AS rel_concerne0, cw_Societe AS _C
2673 +WHERE rel_concerne0.eid_to=_C.cw_eid AND ((EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by1 WHERE rel_concerne0.eid_from=rel_owned_by1.eid_from AND rel_owned_by1.eid_to=1)) OR (EXISTS(SELECT 1 FROM documented_by_relation AS rel_documented_by2, cw_Card AS _N WHERE rel_concerne0.eid_from=rel_documented_by2.eid_from AND rel_documented_by2.eid_to=_N.cw_eid AND _N.cw_title=published)))
2674 +GROUP BY _C.cw_nom
2675 +ORDER BY 1 DESC
2676 +LIMIT 10'''),
2677 +    ('DISTINCT Any S ORDERBY stockproc(SI) WHERE NOT S ecrit_par O, S para SI',
2678 +     '''SELECT T1.C0 FROM (SELECT DISTINCT _S.cw_eid AS C0, STOCKPROC(_S.cw_para) AS C1
2679 +FROM cw_Note AS _S
2680 +WHERE _S.cw_ecrit_par IS NULL
2681 +ORDER BY 2) AS T1'''),
2682 +
2683 +    ('DISTINCT Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 2, DF WHERE X data_name N, X data D, X data_format DF;',
2684 +     '''SELECT T1.C0,T1.C1 FROM (SELECT DISTINCT (MAX(_X.cw_eid) + MIN(LENGTH(_X.cw_data))) AS C0, _X.cw_data_name AS C1, _X.cw_data_format AS C2
2685 +FROM cw_File AS _X
2686 +GROUP BY _X.cw_data_name,_X.cw_data_format
2687 +ORDER BY 2,3) AS T1
2688 +'''),
2689 +
2690      ('DISTINCT Any X ORDERBY stockproc(X) WHERE U login X',
2691       '''SELECT T1.C0 FROM (SELECT DISTINCT _U.cw_login AS C0, STOCKPROC(_U.cw_login) AS C1
2692  FROM cw_CWUser AS _U
2693  ORDER BY 2) AS T1'''),
2694 
@@ -544,52 +615,12 @@
2695  FROM cw_EmailAddress AS _O
2696  WHERE NOT (EXISTS(SELECT 1 FROM use_email_relation AS rel_use_email0 WHERE rel_use_email0.eid_from=1 AND rel_use_email0.eid_to=_O.cw_eid)) AND EXISTS(SELECT 1 FROM use_email_relation AS rel_use_email1 WHERE rel_use_email1.eid_to=_O.cw_eid AND EXISTS(SELECT 1 FROM cw_CWGroup AS _D WHERE rel_use_email1.eid_from=2 AND NOT (EXISTS(SELECT 1 FROM in_group_relation AS rel_in_group2 WHERE rel_in_group2.eid_from=2 AND rel_in_group2.eid_to=_D.cw_eid)) AND _D.cw_name=guests))
2697  ORDER BY 4 DESC'''),
2698 
2699 
2700 -    ("Any X WHERE X eid 0, X test TRUE",
2701 -     '''SELECT _X.cw_eid
2702 -FROM cw_Personne AS _X
2703 -WHERE _X.cw_eid=0 AND _X.cw_test=TRUE'''),
2704 -
2705 -    ("Any X,GROUP_CONCAT(TN) GROUPBY X ORDERBY XN WHERE T tags X, X name XN, T name TN, X is CWGroup",
2706 -     '''SELECT _X.cw_eid, GROUP_CONCAT(_T.cw_name)
2707 -FROM cw_CWGroup AS _X, cw_Tag AS _T, tags_relation AS rel_tags0
2708 -WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid
2709 -GROUP BY _X.cw_eid,_X.cw_name
2710 -ORDER BY _X.cw_name'''),
2711 -
2712 -    ("Any X,GROUP_CONCAT(TN) GROUPBY X ORDERBY XN WHERE T tags X, X name XN, T name TN",
2713 -     '''SELECT T1.C0, GROUP_CONCAT(T1.C1) FROM (SELECT _X.cw_eid AS C0, _T.cw_name AS C1, _X.cw_name AS C2
2714 -FROM cw_CWGroup AS _X, cw_Tag AS _T, tags_relation AS rel_tags0
2715 -WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid
2716 -UNION ALL
2717 -SELECT _X.cw_eid AS C0, _T.cw_name AS C1, _X.cw_name AS C2
2718 -FROM cw_State AS _X, cw_Tag AS _T, tags_relation AS rel_tags0
2719 -WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid
2720 -UNION ALL
2721 -SELECT _X.cw_eid AS C0, _T.cw_name AS C1, _X.cw_name AS C2
2722 -FROM cw_Tag AS _T, cw_Tag AS _X, tags_relation AS rel_tags0
2723 -WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid) AS T1
2724 -GROUP BY T1.C0,T1.C2
2725 -ORDER BY T1.C2'''),
2726 -
2727 -    ('Any 1 WHERE X in_group G, X is CWUser',
2728 -     '''SELECT 1
2729 -FROM in_group_relation AS rel_in_group0'''),
2730 -
2731 -    ('CWEType X WHERE X name CV, X description V HAVING NOT V=CV AND NOT V = "parent"',
2732 -     '''SELECT _X.cw_eid
2733 -FROM cw_CWEType AS _X
2734 -WHERE NOT (EXISTS(SELECT 1 WHERE _X.cw_description=parent)) AND NOT (EXISTS(SELECT 1 WHERE _X.cw_description=_X.cw_name))'''),
2735 -    ('CWEType X WHERE X name CV, X description V HAVING V!=CV AND V != "parent"',
2736 -     '''SELECT _X.cw_eid
2737 -FROM cw_CWEType AS _X
2738 -WHERE _X.cw_description!=parent AND _X.cw_description!=_X.cw_name'''),
2739      ]
2740 
2741 -
2742  MULTIPLE_SEL = [
2743      ("DISTINCT Any X,Y where P is Personne, P nom X , P prenom Y;",
2744       '''SELECT DISTINCT _P.cw_nom, _P.cw_prenom
2745  FROM cw_Personne AS _P'''),
2746      ("Any X,Y where P is Personne, P nom X , P prenom Y, not P nom NULL;",
@@ -710,16 +741,43 @@
2747 
2748      ('Any S WHERE NOT(X in_state S, S name "somename"), X is CWUser',
2749       '''SELECT _S.cw_eid
2750  FROM cw_State AS _S
2751  WHERE NOT (EXISTS(SELECT 1 FROM cw_CWUser AS _X WHERE _X.cw_in_state=_S.cw_eid AND _S.cw_name=somename))'''),
2752 +    ]
2753 +
2754 +HAS_TEXT_LG_INDEXER = [
2755 +            ('Any X WHERE X has_text "toto tata"',
2756 +             """SELECT DISTINCT appears0.uid
2757 +FROM appears AS appears0
2758 +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata'))"""),
2759 +            ('Personne X WHERE X has_text "toto tata"',
2760 +             """SELECT DISTINCT _X.eid
2761 +FROM appears AS appears0, entities AS _X
2762 +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.eid AND _X.type='Personne'"""),
2763 +            ('Personne X WHERE X has_text %(text)s',
2764 +             """SELECT DISTINCT _X.eid
2765 +FROM appears AS appears0, entities AS _X
2766 +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('hip', 'hop', 'momo')) AND appears0.uid=_X.eid AND _X.type='Personne'
2767 +"""),
2768 +            ('Any X WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)',
2769 +             """SELECT DISTINCT _X.cw_eid
2770 +FROM appears AS appears0, cw_Basket AS _X
2771 +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu
2772 +UNION
2773 +SELECT DISTINCT _X.cw_eid
2774 +FROM appears AS appears0, cw_Folder AS _X
2775 +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu""")
2776 +        ]
2777 +
2778 +
2779 
2780  # XXXFIXME fail
2781  #         ('Any X,RT WHERE X relation_type RT?, NOT X is CWAttribute',
2782  #      '''SELECT _X.cw_eid, _X.cw_relation_type
2783  # FROM cw_CWRelation AS _X'''),
2784 -]
2785 +
2786 
2787  OUTER_JOIN = [
2788      ('Any X,S WHERE X travaille S?',
2789       '''SELECT _X.cw_eid, rel_travaille0.eid_to
2790  FROM cw_Personne AS _X LEFT OUTER JOIN travaille_relation AS rel_travaille0 ON (rel_travaille0.eid_from=_X.cw_eid)'''
@@ -963,24 +1021,26 @@
2791       '''SELECT DISTINCT _X.cw_eid
2792  FROM connait_relation AS rel_connait0, cw_Personne AS _P, cw_Personne AS _X
2793  WHERE (rel_connait0.eid_from=_X.cw_eid AND rel_connait0.eid_to=_P.cw_eid OR rel_connait0.eid_to=_X.cw_eid AND rel_connait0.eid_from=_P.cw_eid) AND _P.cw_nom=nom'''
2794      ),
2795 
2796 -    ('Any X ORDERBY X DESC LIMIT 9 WHERE E eid 0, E connait X',
2797 +    ('DISTINCT Any P WHERE P connait S OR S connait P, S nom "chouette"',
2798 +     '''SELECT DISTINCT _P.cw_eid
2799 +FROM connait_relation AS rel_connait0, cw_Personne AS _P, cw_Personne AS _S
2800 +WHERE (rel_connait0.eid_from=_P.cw_eid AND rel_connait0.eid_to=_S.cw_eid OR rel_connait0.eid_to=_P.cw_eid AND rel_connait0.eid_from=_S.cw_eid) AND _S.cw_nom=chouette'''
2801 +     )
2802 +    ]
2803 +
2804 +SYMMETRIC_WITH_LIMIT = [
2805 +        ('Any X ORDERBY X DESC LIMIT 9 WHERE E eid 0, E connait X',
2806      '''SELECT DISTINCT _X.cw_eid
2807  FROM connait_relation AS rel_connait0, cw_Personne AS _X
2808  WHERE (rel_connait0.eid_from=0 AND rel_connait0.eid_to=_X.cw_eid OR rel_connait0.eid_to=0 AND rel_connait0.eid_from=_X.cw_eid)
2809  ORDER BY 1 DESC
2810  LIMIT 9'''
2811       ),
2812 -
2813 -    ('DISTINCT Any P WHERE P connait S OR S connait P, S nom "chouette"',
2814 -     '''SELECT DISTINCT _P.cw_eid
2815 -FROM connait_relation AS rel_connait0, cw_Personne AS _P, cw_Personne AS _S
2816 -WHERE (rel_connait0.eid_from=_P.cw_eid AND rel_connait0.eid_to=_S.cw_eid OR rel_connait0.eid_to=_P.cw_eid AND rel_connait0.eid_from=_S.cw_eid) AND _S.cw_nom=chouette'''
2817 -     )
2818 -    ]
2819 +]
2820 
2821  INLINE = [
2822 
2823      ('Any P WHERE N eid 1, N ecrit_par P, NOT P owned_by P2',
2824       '''SELECT _N.cw_ecrit_par
@@ -1242,15 +1302,15 @@
2825      def test_parser_parse(self):
2826          for t in self._parse(PARSER):
2827              yield t
2828 
2829      def test_basic_parse(self):
2830 -        for t in self._parse(BASIC):
2831 +        for t in self._parse(BASIC + BASIC_WITH_LIMIT):
2832              yield t
2833 
2834      def test_advanced_parse(self):
2835 -        for t in self._parse(ADVANCED):
2836 +        for t in self._parse(ADVANCED + ADVANCED_WITH_LIMIT_OR_ORDERBY + ADVANCED_WITH_GROUP_CONCAT):
2837              yield t
2838 
2839      def test_outer_join_parse(self):
2840          for t in self._parse(OUTER_JOIN):
2841              yield t
@@ -1355,11 +1415,11 @@
2842                 ' (Any X WHERE X is Transition))')
2843          rqlst = self._prepare(rql)
2844          self.assertRaises(BadRQLQuery, self.o.generate, rqlst)
2845 
2846      def test_symmetric(self):
2847 -        for t in self._parse(SYMMETRIC):
2848 +        for t in self._parse(SYMMETRIC + SYMMETRIC_WITH_LIMIT):
2849              yield t
2850 
2851      def test_inline(self):
2852          for t in self._parse(INLINE):
2853              yield t
@@ -1507,10 +1567,115 @@
2854          # X without in_group relation, or some G without it.
2855          self._check('Any 1 WHERE NOT X in_group G, X is CWUser',
2856                      '''SELECT 1
2857  WHERE NOT (EXISTS(SELECT 1 FROM in_group_relation AS rel_in_group0))''')
2858 
2859 +class SqlServer2005SQLGeneratorTC(PostgresSQLGeneratorTC):
2860 +    backend = 'sqlserver2005'
2861 +    def _norm_sql(self, sql):
2862 +        return sql.strip().replace(' SUBSTR', ' SUBSTRING').replace(' || ', ' + ').replace(' ILIKE ', ' LIKE ')
2863 +
2864 +    def test_has_text(self):
2865 +        for t in self._parse(HAS_TEXT_LG_INDEXER):
2866 +            yield t
2867 +
2868 +    def test_or_having_fake_terms(self):
2869 +        self._check('Any X WHERE X is CWUser, X creation_date D HAVING YEAR(D) = "2010" OR D = NULL',
2870 +                    '''SELECT _X.cw_eid
2871 +FROM cw_CWUser AS _X
2872 +WHERE ((YEAR(_X.cw_creation_date)=2010) OR (_X.cw_creation_date IS NULL))''')
2873 +
2874 +    def test_date_extraction(self):
2875 +        self._check("Any MONTH(D) WHERE P is Personne, P creation_date D",
2876 +                    '''SELECT MONTH(_P.cw_creation_date)
2877 +FROM cw_Personne AS _P''')
2878 +
2879 +    def test_symmetric(self):
2880 +        for t in self._parse(SYMMETRIC):
2881 +            yield t
2882 +
2883 +    def test_basic_parse(self):
2884 +        for t in self._parse(BASIC):# + BASIC_WITH_LIMIT):
2885 +            yield t
2886 +
2887 +    def test_advanced_parse(self):
2888 +        for t in self._parse(ADVANCED):# + ADVANCED_WITH_LIMIT_OR_ORDERBY):
2889 +            yield t
2890 +
2891 +    def test_limit_offset(self):
2892 +        WITH_LIMIT = [
2893 +    ("Personne P LIMIT 20 OFFSET 10",
2894 +             '''WITH orderedrows AS (
2895 +SELECT
2896 +_L01
2897 +, ROW_NUMBER() OVER (ORDER BY _L01) AS __RowNumber
2898 +FROM (
2899 +SELECT _P.cw_eid AS _L01 FROM  cw_Personne AS _P
2900 +) AS _SQ1 )
2901 +SELECT
2902 +_L01
2903 +FROM orderedrows WHERE
2904 +__RowNumber <= 30 AND __RowNumber > 10
2905 + '''),
2906 +
2907 +    ('Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 WHERE S is Affaire, C is Societe, S concerne C, C nom CS, (EXISTS(S owned_by 1)) OR (EXISTS(S documented_by N, N title "published"))',
2908 +     '''WITH orderedrows AS (
2909 +SELECT
2910 +_L01, _L02
2911 +, ROW_NUMBER() OVER (ORDER BY _L01 DESC) AS __RowNumber
2912 +FROM (
2913 +SELECT COUNT(rel_concerne0.eid_from) AS _L01, _C.cw_nom AS _L02 FROM  concerne_relation AS rel_concerne0, cw_Societe AS _C
2914 +WHERE rel_concerne0.eid_to=_C.cw_eid AND ((EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by1 WHERE rel_concerne0.eid_from=rel_owned_by1.eid_from AND rel_owned_by1.eid_to=1)) OR (EXISTS(SELECT 1 FROM documented_by_relation AS rel_documented_by2, cw_Card AS _N WHERE rel_concerne0.eid_from=rel_documented_by2.eid_from AND rel_documented_by2.eid_to=_N.cw_eid AND _N.cw_title=published)))
2915 +GROUP BY _C.cw_nom
2916 +) AS _SQ1 )
2917 +SELECT
2918 +_L01, _L02
2919 +FROM orderedrows WHERE
2920 +__RowNumber <= 10
2921 +     '''),
2922 +
2923 +    ('DISTINCT Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 2, DF WHERE X data_name N, X data D, X data_format DF;',
2924 +     '''SELECT T1.C0,T1.C1 FROM (SELECT DISTINCT (MAX(_X.cw_eid) + MIN(LENGTH(_X.cw_data))) AS C0, _X.cw_data_name AS C1, _X.cw_data_format AS C2
2925 +FROM cw_File AS _X
2926 +GROUP BY _X.cw_data_name,_X.cw_data_format) AS T1
2927 +ORDER BY T1.C1,T1.C2
2928 +'''),
2929 +
2930 +
2931 +    ('DISTINCT Any X ORDERBY Y WHERE B bookmarked_by X, X login Y',
2932 +     '''SELECT T1.C0 FROM (SELECT DISTINCT _X.cw_eid AS C0, _X.cw_login AS C1
2933 +FROM bookmarked_by_relation AS rel_bookmarked_by0, cw_CWUser AS _X
2934 +WHERE rel_bookmarked_by0.eid_to=_X.cw_eid) AS T1
2935 +ORDER BY T1.C1
2936 + '''),
2937 +
2938 +    ('DISTINCT Any X ORDERBY SN WHERE X in_state S, S name SN',
2939 +     '''SELECT T1.C0 FROM (SELECT DISTINCT _X.cw_eid AS C0, _S.cw_name AS C1
2940 +FROM cw_Affaire AS _X, cw_State AS _S
2941 +WHERE _X.cw_in_state=_S.cw_eid
2942 +UNION
2943 +SELECT DISTINCT _X.cw_eid AS C0, _S.cw_name AS C1
2944 +FROM cw_CWUser AS _X, cw_State AS _S
2945 +WHERE _X.cw_in_state=_S.cw_eid
2946 +UNION
2947 +SELECT DISTINCT _X.cw_eid AS C0, _S.cw_name AS C1
2948 +FROM cw_Note AS _X, cw_State AS _S
2949 +WHERE _X.cw_in_state=_S.cw_eid) AS T1
2950 +ORDER BY T1.C1'''),
2951 +
2952 +    ('Any O,AA,AB,AC ORDERBY AC DESC '
2953 +     'WHERE NOT S use_email O, S eid 1, O is EmailAddress, O address AA, O alias AB, O modification_date AC, '
2954 +     'EXISTS(A use_email O, EXISTS(A identity B, NOT B in_group D, D name "guests", D is CWGroup), A is CWUser), B eid 2',
2955 +     '''
2956 +SELECT _O.cw_eid, _O.cw_address, _O.cw_alias, _O.cw_modification_date
2957 +FROM cw_EmailAddress AS _O
2958 +WHERE NOT (EXISTS(SELECT 1 FROM use_email_relation AS rel_use_email0 WHERE rel_use_email0.eid_from=1 AND rel_use_email0.eid_to=_O.cw_eid)) AND EXISTS(SELECT 1 FROM use_email_relation AS rel_use_email1 WHERE rel_use_email1.eid_to=_O.cw_eid AND EXISTS(SELECT 1 FROM cw_CWGroup AS _D WHERE rel_use_email1.eid_from=2 AND NOT (EXISTS(SELECT 1 FROM in_group_relation AS rel_in_group2 WHERE rel_in_group2.eid_from=2 AND rel_in_group2.eid_to=_D.cw_eid)) AND _D.cw_name=guests))
2959 +ORDER BY 4 DESC'''),
2960 +            ]
2961 +        for t in self._parse(WITH_LIMIT):# + ADVANCED_WITH_LIMIT_OR_ORDERBY):
2962 +            yield t
2963 +
2964 
2965 
2966  class SqliteSQLGeneratorTC(PostgresSQLGeneratorTC):
2967      backend = 'sqlite'
2968 
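
Note: the unittest_rql2sql.py hunks above stub out logilab.database's driver-module import so that backend-specific SQL generators (for instance the new SqlServer2005SQLGeneratorTC) can be exercised on a machine without the corresponding DB-API driver installed. A minimal standalone sketch of the same trick, assuming the private _import_driver_module hook patched above and the public get_db_helper entry point; restoring the original hook afterwards matters, just as the tearDownModule hunk does:

    from logilab import database as db

    def fake_import_driver_module(driver, drivers, quiet=True):
        # never import anything: report the first candidate module name as available
        if driver not in drivers:
            raise db.UnknownDriver(driver)
        return None, drivers[driver][0]

    _orig_import = db._import_driver_module
    db._import_driver_module = fake_import_driver_module
    try:
        # assumed to work without a real driver once the import hook is stubbed
        helper = db.get_db_helper('sqlserver2005')
    finally:
        db._import_driver_module = _orig_import
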
diff --git a/server/test/unittest_security.py b/server/test/unittest_security.py
@@ -25,20 +25,19 @@
2969  from cubicweb import Unauthorized, ValidationError, QueryError
2970  from cubicweb.server.querier import check_read_access
2971 
2972  class BaseSecurityTC(CubicWebTC):
2973 
2974 -    def setUp(self):
2975 -        CubicWebTC.setUp(self)
2976 +    def setup_database(self):
2977 +        super(BaseSecurityTC, self).setup_database()
2978          self.create_user('iaminusersgrouponly')
2979 -        self.readoriggroups = self.schema['Personne'].permissions['read']
2980 -        self.addoriggroups = self.schema['Personne'].permissions['add']
2981 -
2982 -    def tearDown(self):
2983 -        CubicWebTC.tearDown(self)
2984 -        self.schema['Personne'].set_action_permissions('read', self.readoriggroups)
2985 -        self.schema['Personne'].set_action_permissions('add', self.addoriggroups)
2986 +        readoriggroups = self.schema['Personne'].permissions['read']
2987 +        addoriggroups = self.schema['Personne'].permissions['add']
2988 +        def fix_perm():
2989 +            self.schema['Personne'].set_action_permissions('read', readoriggroups)
2990 +            self.schema['Personne'].set_action_permissions('add', addoriggroups)
2991 +        self.addCleanup(fix_perm)
2992 
2993 
2994  class LowLevelSecurityFunctionTC(BaseSecurityTC):
2995 
2996      def test_check_read_access(self):
@@ -73,21 +72,22 @@
2997              return []
2998          self.repo.system_source.syntax_tree_search = syntax_tree_search
2999 
3000      def tearDown(self):
3001          self.repo.system_source.__dict__.pop('syntax_tree_search', None)
3002 -        BaseSecurityTC.tearDown(self)
3003 +        super(SecurityRewritingTC, self).tearDown()
3004 
3005      def test_not_relation_read_security(self):
3006          cnx = self.login('iaminusersgrouponly')
3007          self.hijack_source_execute()
3008          self.execute('Any U WHERE NOT A todo_by U, A is Affaire')
3009          self.assertEqual(self.query[0][1].as_string(),
3010                            'Any U WHERE NOT EXISTS(A todo_by U), A is Affaire')
3011          self.execute('Any U WHERE NOT EXISTS(A todo_by U), A is Affaire')
3012          self.assertEqual(self.query[0][1].as_string(),
3013                            'Any U WHERE NOT EXISTS(A todo_by U), A is Affaire')
3014 +        cnx.close()
3015 
3016  class SecurityTC(BaseSecurityTC):
3017 
3018      def setUp(self):
3019          BaseSecurityTC.setUp(self)
@@ -102,10 +102,11 @@
3020          cnx = self.login('anon')
3021          cu = cnx.cursor()
3022          cu.execute("INSERT Personne X: X nom 'bidule'")
3023          self.assertRaises(Unauthorized, cnx.commit)
3024          self.assertEqual(cu.execute('Personne X').rowcount, 1)
3025 +        cnx.close()
3026 
3027      def test_insert_rql_permission(self):
3028          # test user can only add an Affaire related to a Societe he owns
3029          cnx = self.login('iaminusersgrouponly')
3030          cu = cnx.cursor()
@@ -118,10 +119,11 @@
3031          cu = cnx.cursor()
3032          cu.execute("INSERT Affaire X: X sujet 'cool'")
3033          cu.execute("INSERT Societe X: X nom 'chouette'")
3034          cu.execute("SET A concerne S WHERE A sujet 'cool', S nom 'chouette'")
3035          cnx.commit()
3036 +        cnx.close()
3037 
3038      def test_update_security_1(self):
3039          cnx = self.login('anon')
3040          cu = cnx.cursor()
3041          # local security check
@@ -145,10 +147,11 @@
3042          cnx = self.login('iaminusersgrouponly')
3043          cu = cnx.cursor()
3044          cu.execute("INSERT Personne X: X nom 'biduuule'")
3045          cu.execute("INSERT Societe X: X nom 'looogilab'")
3046          cu.execute("SET X travaille S WHERE X nom 'biduuule', S nom 'looogilab'")
3047 +        cnx.close()
3048 
3049      def test_update_rql_permission(self):
3050          self.execute("SET A concerne S WHERE A is Affaire, S is Societe")
3051          self.commit()
3052          # test user can only update an Affaire related to a Societe he owns
@@ -163,10 +166,11 @@
3053          cu.execute("INSERT Affaire X: X sujet 'pascool'")
3054          cu.execute("INSERT Societe X: X nom 'chouette'")
3055          cu.execute("SET A concerne S WHERE A sujet 'pascool', S nom 'chouette'")
3056          cu.execute("SET X sujet 'habahsicestcool' WHERE X sujet 'pascool'")
3057          cnx.commit()
3058 +        cnx.close()
3059 
3060      def test_delete_security(self):
3061          # FIXME: sample below fails because we don't detect "owner" can't delete
3062          # user anyway, and since no user with login == 'bidule' exists, no
3063          # exception is raised
@@ -175,10 +179,11 @@
3064          #                  self.o.execute, user, "DELETE CWUser X WHERE X login 'bidule'")
3065          # check local security
3066          cnx = self.login('iaminusersgrouponly')
3067          cu = cnx.cursor()
3068          self.assertRaises(Unauthorized, cu.execute, "DELETE CWGroup Y WHERE Y name 'staff'")
3069 +        cnx.close()
3070 
3071      def test_delete_rql_permission(self):
3072          self.execute("SET A concerne S WHERE A is Affaire, S is Societe")
3073          self.commit()
3074          # test user can only delete an Affaire related to a Societe he owns
@@ -198,10 +203,11 @@
3075  ##         # this one should fail since it will try to delete two affaires, one authorized
3076  ##         # and the other not
3077  ##         self.assertRaises(Unauthorized, cu.execute, "DELETE Affaire X")
3078          cu.execute("DELETE Affaire X WHERE X sujet 'pascool'")
3079          cnx.commit()
3080 +        cnx.close()
3081 
3082 
3083      def test_insert_relation_rql_permission(self):
3084          cnx = self.login('iaminusersgrouponly')
3085          session = self.session
@@ -223,10 +229,11 @@
3086          # test nothing has actually been inserted:
3087          self.assertEqual(cu.execute('Any P,S WHERE P travaille S,P is Personne, S is Societe').rowcount, 0)
3088          cu.execute("INSERT Societe X: X nom 'chouette'")
3089          cu.execute("SET A concerne S WHERE A is Affaire, S nom 'chouette'")
3090          cnx.commit()
3091 +        cnx.close()
3092 
3093      def test_delete_relation_rql_permission(self):
3094          self.execute("SET A concerne S WHERE A is Affaire, S is Societe")
3095          self.commit()
3096          cnx = self.login('iaminusersgrouponly')
@@ -247,10 +254,11 @@
3097          cnx.rollback() # required after Unauthorized
3098          cu.execute("INSERT Societe X: X nom 'chouette'")
3099          cu.execute("SET A concerne S WHERE A is Affaire, S nom 'chouette'")
3100          cnx.commit()
3101          cu.execute("DELETE A concerne S WHERE S nom 'chouette'")
3102 +        cnx.close()
3103 
3104 
3105      def test_user_can_change_its_upassword(self):
3106          ueid = self.create_user('user').eid
3107          cnx = self.login('user')
@@ -258,27 +266,30 @@
3108          cu.execute('SET X upassword %(passwd)s WHERE X eid %(x)s',
3109                     {'x': ueid, 'passwd': 'newpwd'})
3110          cnx.commit()
3111          cnx.close()
3112          cnx = self.login('user', password='newpwd')
3113 +        cnx.close()
3114 
3115      def test_user_cant_change_other_upassword(self):
3116          ueid = self.create_user('otheruser').eid
3117          cnx = self.login('iaminusersgrouponly')
3118          cu = cnx.cursor()
3119          cu.execute('SET X upassword %(passwd)s WHERE X eid %(x)s',
3120                     {'x': ueid, 'passwd': 'newpwd'})
3121          self.assertRaises(Unauthorized, cnx.commit)
3122 +        cnx.close()
3123 
3124      # read security test
3125 
3126      def test_read_base(self):
3127          self.schema['Personne'].set_action_permissions('read', ('users', 'managers'))
3128          cnx = self.login('anon')
3129          cu = cnx.cursor()
3130          self.assertRaises(Unauthorized,
3131                            cu.execute, 'Personne U where U nom "managers"')
3132 +        cnx.close()
3133 
3134      def test_read_erqlexpr_base(self):
3135          eid = self.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
3136          self.commit()
3137          cnx = self.login('iaminusersgrouponly')
@@ -299,10 +310,11 @@
3138          self.assertEqual(rset.rows, [[aff2]])
3139          rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': aff2})
3140          self.assertEqual(rset.rows, [])
3141          # test can't update an attribute of an entity that can't be readen
3142          self.assertRaises(Unauthorized, cu.execute, 'SET X sujet "hacked" WHERE X eid %(x)s', {'x': eid})
3143 +        cnx.close()
3144 
3145 
3146      def test_entity_created_in_transaction(self):
3147          affschema = self.schema['Affaire']
3148          origperms = affschema.permissions['read']
@@ -335,10 +347,11 @@
3149          self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2}))
3150          self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':card1}))
3151          rset = cu.execute("Any X WHERE X has_text 'cool'")
3152          self.assertEqual(sorted(eid for eid, in rset.rows),
3153                            [card1, aff2])
3154 +        cnx.close()
3155 
3156      def test_read_erqlexpr_has_text2(self):
3157          self.execute("INSERT Personne X: X nom 'bidule'")
3158          self.execute("INSERT Societe X: X nom 'bidule'")
3159          self.commit()
@@ -347,20 +360,22 @@
3160          cu = cnx.cursor()
3161          rset = cu.execute('Any N WHERE N has_text "bidule"')
3162          self.assertEqual(len(rset.rows), 1, rset.rows)
3163          rset = cu.execute('Any N WITH N BEING (Any N WHERE N has_text "bidule")')
3164          self.assertEqual(len(rset.rows), 1, rset.rows)
3165 +        cnx.close()
3166 
3167      def test_read_erqlexpr_optional_rel(self):
3168          self.execute("INSERT Personne X: X nom 'bidule'")
3169          self.execute("INSERT Societe X: X nom 'bidule'")
3170          self.commit()
3171          self.schema['Personne'].set_action_permissions('read', ('managers',))
3172          cnx = self.login('anon')
3173          cu = cnx.cursor()
3174          rset = cu.execute('Any N,U WHERE N has_text "bidule", N owned_by U?')
3175          self.assertEqual(len(rset.rows), 1, rset.rows)
3176 +        cnx.close()
3177 
3178      def test_read_erqlexpr_aggregat(self):
3179          self.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
3180          self.commit()
3181          cnx = self.login('iaminusersgrouponly')
@@ -380,10 +395,11 @@
3182          rset = cu.execute('Any ETN, COUNT(X) GROUPBY ETN WHERE X is ET, ET name ETN WITH X BEING ((Affaire X) UNION (Societe X))')
3183          self.assertEqual(len(rset), 2)
3184          values = dict(rset)
3185          self.assertEqual(values['Affaire'], 1)
3186          self.assertEqual(values['Societe'], 2)
3187 +        cnx.close()
3188 
3189 
3190      def test_attribute_security(self):
3191          # only managers should be able to edit the 'test' attribute of Personne entities
3192          eid = self.execute("INSERT Personne X: X nom 'bidule', X web 'http://www.debian.org', X test TRUE")[0][0]
@@ -427,10 +443,11 @@
3193          self.assertRaises(Unauthorized, cnx.commit)
3194          note2.cw_adapt_to('IWorkflowable').fire_transition('redoit')
3195          cnx.commit()
3196          cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid})
3197          cnx.commit()
3198 +        cnx.close()
3199 
3200      def test_attribute_read_security(self):
3201          # anon not allowed to see users' login, but they can see users
3202          self.repo.schema['CWUser'].set_action_permissions('read', ('guests', 'users', 'managers'))
3203          self.repo.schema['CWUser'].rdef('login').set_action_permissions('read', ('users', 'managers'))
@@ -444,10 +461,11 @@
3204          x = rset.get_entity(1, 0)
3205          x.complete()
3206          self.assertEqual(x.login, None)
3207          self.failUnless(x.creation_date)
3208          cnx.rollback()
3209 +        cnx.close()
3210 
3211  class BaseSchemaSecurityTC(BaseSecurityTC):
3212      """tests related to the base schema permission configuration"""
3213 
3214      def test_user_can_delete_object_he_created(self):
@@ -470,10 +488,11 @@
3215          cnx = self.login('iaminusersgrouponly')
3216          cu = cnx.cursor()
3217          cu.execute('DELETE Affaire X WHERE X ref "ARCT01"')
3218          cnx.commit()
3219          self.failIf(cu.execute('Affaire X'))
3220 +        cnx.close()
3221 
3222      def test_users_and_groups_non_readable_by_guests(self):
3223          cnx = self.login('anon')
3224          anon = cnx.user(self.session)
3225          cu = cnx.cursor()
@@ -496,26 +515,29 @@
3226          rset = cu.execute('CWUser X WHERE X eid %(x)s', {'x': anon.eid})
3227          self.assertEqual(rset.rows, [[anon.eid]])
3228          # but can't modify it
3229          cu.execute('SET X login "toto" WHERE X eid %(x)s', {'x': anon.eid})
3230          self.assertRaises(Unauthorized, cnx.commit)
3231 +        cnx.close()
3232 
3233      def test_in_group_relation(self):
3234          cnx = self.login('iaminusersgrouponly')
3235          cu = cnx.cursor()
3236          rql = u"DELETE U in_group G WHERE U login 'admin'"
3237          self.assertRaises(Unauthorized, cu.execute, rql)
3238          rql = u"SET U in_group G WHERE U login 'admin', G name 'users'"
3239          self.assertRaises(Unauthorized, cu.execute, rql)
3240 +        cnx.close()
3241 
3242      def test_owned_by(self):
3243          self.execute("INSERT Personne X: X nom 'bidule'")
3244          self.commit()
3245          cnx = self.login('iaminusersgrouponly')
3246          cu = cnx.cursor()
3247          rql = u"SET X owned_by U WHERE U login 'iaminusersgrouponly', X is Personne"
3248          self.assertRaises(Unauthorized, cu.execute, rql)
3249 +        cnx.close()
3250 
3251      def test_bookmarked_by_guests_security(self):
3252          beid1 = self.execute('INSERT Bookmark B: B path "?vid=manage", B title "manage"')[0][0]
3253          beid2 = self.execute('INSERT Bookmark B: B path "?vid=index", B title "index", B bookmarked_by U WHERE U login "anon"')[0][0]
3254          self.commit()
@@ -533,17 +555,19 @@
3255                            [[beid1]])
3256          self.assertRaises(Unauthorized, cu.execute,'DELETE B bookmarked_by U')
3257          self.assertRaises(Unauthorized,
3258                            cu.execute, 'SET B bookmarked_by U WHERE U eid %(x)s, B eid %(b)s',
3259                            {'x': anoneid, 'b': beid1})
3260 +        cnx.close()
3261 
3262 
3263      def test_ambigous_ordered(self):
3264          cnx = self.login('anon')
3265          cu = cnx.cursor()
3266          names = [t for t, in cu.execute('Any N ORDERBY lower(N) WHERE X name N')]
3267          self.assertEqual(names, sorted(names, key=lambda x: x.lower()))
3268 +        cnx.close()
3269 
3270      def test_in_state_without_update_perm(self):
3271          """check a user can change in_state without having update permission on the
3272          subject
3273          """
@@ -573,10 +597,11 @@
3274                                user.cw_adapt_to('IWorkflowable').fire_transition, 'deactivate')
3275          finally:
3276              # restore orig perms
3277              for action, perms in affaire_perms.iteritems():
3278                  self.schema['Affaire'].set_action_permissions(action, perms)
3279 +        cnx.close()
3280 
3281      def test_trinfo_security(self):
3282          aff = self.execute('INSERT Affaire X: X ref "ARCT01"').get_entity(0, 0)
3283          iworkflowable = aff.cw_adapt_to('IWorkflowable')
3284          self.commit()
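
Note: the unittest_security.py hunks replace paired setUp/tearDown permission bookkeeping with addCleanup calls registered from setup_database, so the restore code sits next to the change it undoes and runs even when a test fails early. A minimal sketch of the pattern, assuming a CubicWebTC subclass and the schema permission API used above; the test class and entity type restriction are illustrative only:

    from cubicweb import Unauthorized
    from cubicweb.devtools.testlib import CubicWebTC

    class RestrictedReadTC(CubicWebTC):
        def setup_database(self):
            eschema = self.schema['Personne']
            orig_read = eschema.permissions['read']
            eschema.set_action_permissions('read', ('managers',))
            # undone automatically at the end of each test, pass or fail
            self.addCleanup(eschema.set_action_permissions, 'read', orig_read)

        def test_anon_cannot_read(self):
            cnx = self.login('anon')
            self.assertRaises(Unauthorized, cnx.cursor().execute, 'Personne X')
            cnx.close()
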
diff --git a/test/unittest_migration.py b/test/unittest_migration.py
@@ -95,20 +95,20 @@
3285                                 ((0, 1 ,0), TMIGRDIR+'0.1.0_repository.py'),
3286                                 ((0, 1 ,0), TMIGRDIR+'0.1.0_web.py')])
3287          config.__class__.name = 'repository'
3288 
3289 
3290 -from cubicweb.devtools import ApptestConfiguration, init_test_database, cleanup_sqlite
3291 +from cubicweb.devtools import ApptestConfiguration, get_test_db_handler
3292 
3293  class BaseCreationTC(TestCase):
3294 
3295      def test_db_creation(self):
3296          """make sure database can be created"""
3297          config = ApptestConfiguration('data', apphome=self.datadir)
3298          source = config.sources()['system']
3299          self.assertEqual(source['db-driver'], 'sqlite')
3300 -        cleanup_sqlite(source['db-name'], removetemplate=True)
3301 -        init_test_database(config=config)
3302 +        handler = get_test_db_handler(config)
3303 +        handler.init_test_database()
3304 
3305 
3306  if __name__ == '__main__':
3307      unittest_main()
diff --git a/vregistry.py b/vregistry.py
@@ -235,10 +235,14 @@
3308          # return the result of calling the appobject
3309          return winners[0](*args, **kwargs)
3310 
3311      select_best = deprecated('[3.6] select_best is now private')(_select_best)
3312 
3313 +    # these are overridden by set_log_methods below
3314 +    # only defining here to prevent pylint from complaining
3315 +    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
3316 +
3317 
3318  class VRegistry(dict):
3319      """class responsible for registering, proposing and selecting the various
3320      elements used to build the web interface. Currently, we have templates,
3321      views, actions and components.
@@ -515,10 +519,13 @@
3322          except Exception, ex:
3323              if self.config.mode in ('test', 'dev'):
3324                  raise
3325              self.exception('appobject %s registration failed: %s',
3326                             appobjectcls, ex)
3327 +    # these are overridden by set_log_methods below
3328 +    # only defining here to prevent pylint from complaining
3329 +    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
3330 
3331 
3332  # init logging
3333  set_log_methods(VRegistry, getLogger('cubicweb.vreg'))
3334  set_log_methods(Registry, getLogger('cubicweb.registry'))
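
Note: several classes in this changeset gain a line of no-op logging attributes; set_log_methods later rebinds them to real logger methods, and the lambdas exist only so pylint sees the names as defined. A minimal sketch of the convention, assuming logilab.common.logging_ext.set_log_methods as imported above; the Worker class and logger name are hypothetical:

    from logging import getLogger
    from logilab.common.logging_ext import set_log_methods

    class Worker(object):
        # these are overridden by set_log_methods below
        # only defined here to prevent pylint from complaining
        info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None

        def run(self):
            self.info('worker %s starting', self)  # dispatches to the injected logger method

    set_log_methods(Worker, getLogger('cubicweb.example'))  # hypothetical logger name
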
diff --git a/web/application.py b/web/application.py
@@ -232,10 +232,13 @@
3335          self.session_manager.close_session(req.session)
3336          sessioncookie = self.session_cookie(req)
3337          req.remove_cookie(req.get_cookie(), sessioncookie)
3338          raise LogOut(url=goto_url)
3339 
3340 +    # these are overridden by set_log_methods below
3341 +    # only defining here to prevent pylint from complaining
3342 +    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
3343 
3344  class CubicWebPublisher(object):
3345      """the publisher is a singleton held by the web frontend, and is responsible
3346      for publishing HTTP requests.
3347      """
@@ -456,8 +459,11 @@
3348          template = req.form.get('__template', req.property_value('ui.main-template'))
3349          if template not in self.vreg['views']:
3350              template = 'main-template'
3351          return template
3352 
3353 +    # these are overridden by set_log_methods below
3354 +    # only defining here to prevent pylint from complaining
3355 +    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
3356 
3357  set_log_methods(CubicWebPublisher, LOGGER)
3358  set_log_methods(CookieSessionHandler, LOGGER)
diff --git a/web/data/cubicweb.css b/web/data/cubicweb.css
@@ -1,8 +1,8 @@
3359  /*
3360   *  :organization: Logilab
3361 - *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
3362 + *  :copyright: 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
3363   *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
3364   */
3365 
3366  /***************************************/
3367  /* xhtml tags                          */
@@ -218,11 +218,11 @@
3368  /***************************************/
3369 
3370  /* header */
3371 
3372  table#header {
3373 -  background: %(headerBgColor)s url("banner.png") repeat-x top left;
3374 +  background: %(headerBg)s;
3375    width: 100%;
3376  }
3377 
3378  table#header td {
3379    vertical-align: middle;
diff --git a/web/data/cubicweb.old.css b/web/data/cubicweb.old.css
@@ -1,8 +1,8 @@
3380  /*
3381   *  :organization: Logilab
3382 - *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
3383 + *  :copyright: 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
3384   *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
3385   */
3386 
3387  /***************************************/
3388  /* xhtml tags                          */
@@ -225,11 +225,11 @@
3389  /***************************************/
3390 
3391  /* header */
3392 
3393  table#header {
3394 -  background: #ff7700 url("banner.png") left top repeat-x;
3395 +  background: %(headerBg)s;
3396    width: 100%;
3397  }
3398 
3399  table#header td {
3400    vertical-align: middle;
diff --git a/web/data/uiprops.py b/web/data/uiprops.py
@@ -73,10 +73,11 @@
3401  # XXX
3402  defaultLayoutMargin = '8px'
3403 
3404  # header
3405  headerBgColor = '#ff7700'
3406 +headerBg = lazystr('%(headerBgColor)s url("banner.png") repeat-x top left')
3407 
3408  # h
3409  h1FontSize = '1.5em' # 18px
3410  h1Padding = '0 0 0.14em 0 '
3411  h1Margin = '0.8em 0 0.5em'
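
Note: the CSS hunks above replace the hard-coded banner background with a %(headerBg)s property, and uiprops.py defines headerBg as a lazystr so it can itself reference %(headerBgColor)s and be resolved only when the property sheet substitutes values into the stylesheet (the compile() method in the web/propertysheet.py hunk below, which apparently doubles literal % signs such as the one in width: 100% before interpolating). A rough illustration of the two-level substitution, assuming a plain dict in place of the PropertySheet and doing the lazy step eagerly; the regex stands in for PropertySheet's own _percent_rgx:

    import re

    props = {'headerBgColor': '#ff7700'}
    # what lazystr defers until compile time: headerBg expands against the other properties
    props['headerBg'] = '%(headerBgColor)s url("banner.png") repeat-x top left' % props

    css = 'table#header {\n  background: %(headerBg)s;\n  width: 100%;\n}'
    # escape % signs that do not start a %(name)s reference, then interpolate the properties
    compiled = re.sub(r'%(?!\()', '%%', css) % props
    print compiled
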
diff --git a/web/propertysheet.py b/web/propertysheet.py
@@ -113,8 +113,12 @@
3412              return adirectory
3413 
3414      def compile(self, content):
3415          return self._percent_rgx.sub('%%', content) % self
3416 
3417 +    # these are overridden by set_log_methods below
3418 +    # only defining here to prevent pylint from complaining
3419 +    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
3420 +
3421  from cubicweb.web import LOGGER
3422  from logilab.common.logging_ext import set_log_methods
3423  set_log_methods(PropertySheet, LOGGER)
diff --git a/web/views/actions.py b/web/views/actions.py
@@ -1,6 +1,6 @@
3424 -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
3425 +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
3426  # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
3427  #
3428  # This file is part of CubicWeb.
3429  #
3430  # CubicWeb is free software: you can redistribute it and/or modify it under the
@@ -26,11 +26,11 @@
3431 
3432  from cubicweb.schema import display_name
3433  from cubicweb.appobject import objectify_selector
3434  from cubicweb.selectors import (EntitySelector, yes,
3435      one_line_rset, multi_lines_rset, one_etype_rset, relation_possible,
3436 -    nonempty_rset, non_final_entity,
3437 +    nonempty_rset, non_final_entity, score_entity,
3438      authenticated_user, match_user_groups, match_search_state,
3439      has_permission, has_add_permission, is_instance, debug_mode,
3440      )
3441  from cubicweb.web import uicfg, controller, action
3442  from cubicweb.web.views import linksearch_select_url, vid_from_rset
@@ -320,11 +320,11 @@
3443 
3444  class ViewSameCWEType(action.Action):
3445      """when displaying the schema of a CWEType, offer to list entities of that type
3446      """
3447      __regid__ = 'entitiesoftype'
3448 -    __select__ = one_line_rset() & is_instance('CWEType')
3449 +    __select__ = one_line_rset() & is_instance('CWEType') & score_entity(lambda x: not x.final)
3450      category = 'mainactions'
3451      order = 40
3452 
3453      @property
3454      def etype(self):
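
Note: the actions.py hunk narrows the 'entitiesoftype' action with score_entity(lambda x: not x.final), so the action is no longer proposed on final CWETypes. score_entity scores the selection by calling the given function on the entity at hand; a sketch of the same idiom for a hypothetical action, assuming only the selector and action classes imported above:

    from cubicweb.selectors import one_line_rset, is_instance, score_entity
    from cubicweb.web import action

    class ViewNonFinalEType(action.Action):
        """hypothetical action, selectable only on non-final CWEType entities"""
        __regid__ = 'viewnonfinaletype'
        __select__ = (one_line_rset() & is_instance('CWEType')
                      & score_entity(lambda x: not x.final))
        category = 'mainactions'
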
diff --git a/web/views/basecomponents.py b/web/views/basecomponents.py
@@ -1,6 +1,6 @@
3455 -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
3456 +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
3457  # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
3458  #
3459  # This file is part of CubicWeb.
3460  #
3461  # CubicWeb is free software: you can redistribute it and/or modify it under the
@@ -97,11 +97,13 @@
3462 
3463  class ApplicationName(HeaderComponent):
3464      """display the instance name"""
3465      __regid__ = 'appliname'
3466 
3467 -    def render(self, w):
3468 +    # XXX support kwargs for compat with other components which get the view as
3469 +    # an argument
3470 +    def render(self, w, **kwargs):
3471          title = self._cw.property_value('ui.site-title')
3472          if title:
3473              w(u'<span id="appliName"><a href="%s">%s</a></span>' % (
3474                  self._cw.base_url(), xml_escape(title)))
3475 
diff --git a/web/views/basetemplates.py b/web/views/basetemplates.py
@@ -72,11 +72,11 @@
3476 
3477      def content(self, w):
3478          # FIXME Deprecated code ?
3479          msg = self._cw._('you have been logged out')
3480          w(u'<h2>%s</h2>\n' % msg)
3481 -        if self._cw.vreg.config['anonymous-user']:
3482 +        if self._cw.vreg.config.anonymous_user()[0]:
3483              indexurl = self._cw.build_url('view', vid='index', __message=msg)
3484              w(u'<p><a href="%s">%s</a><p>' % (
3485                  xml_escape(indexurl),
3486                  self._cw._('go back to the index page')))
3487 
diff --git a/web/views/ibreadcrumbs.py b/web/views/ibreadcrumbs.py
@@ -1,6 +1,6 @@
3488 -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
3489 +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
3490  # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
3491  #
3492  # This file is part of CubicWeb.
3493  #
3494  # CubicWeb is free software: you can redistribute it and/or modify it under the
@@ -115,11 +115,13 @@
3495      context = basecomponents.ApplicationName.context
3496      separator = u'&#160;&gt;&#160;'
3497      link_template = u'<a href="%s">%s</a>'
3498      first_separator = True
3499 
3500 -    def render(self, w):
3501 +    # XXX support kwargs for compat with other components which get the view as
3502 +    # an argument
3503 +    def render(self, w, **kwargs):
3504          entity = self.cw_rset.get_entity(0, 0)
3505          adapter = ibreadcrumb_adapter(entity)
3506          view = self.cw_extra_kwargs.get('view')
3507          path = adapter.breadcrumbs(view)
3508          if path:
@@ -176,11 +178,13 @@
3509 
3510 
3511  class BreadCrumbAnyRSetVComponent(BreadCrumbEntityVComponent):
3512      __select__ = basecomponents.HeaderComponent.__select__ & any_rset()
3513 
3514 -    def render(self, w):
3515 +    # XXX support kwargs for compat with other components which get the view as
3516 +    # an argument
3517 +    def render(self, w, **kwargs):
3518          w(u'<span id="breadcrumbs" class="pathbar">')
3519          if self.first_separator:
3520              w(self.separator)
3521          w(self._cw._('search'))
3522          w(u'</span>')
@@ -190,11 +194,11 @@
3523      __regid__ = 'breadcrumbs'
3524 
3525      def cell_call(self, row, col, **kwargs):
3526          entity = self.cw_rset.get_entity(row, col)
3527          desc = xml_escape(uilib.cut(entity.dc_description(), 50))
3528 -        # XXX remember camember : tags.a autoescapes !
3529 +        # NOTE remember camember: tags.a autoescapes
3530          self.w(tags.a(entity.view('breadcrumbtext'),
3531                        href=entity.absolute_url(), title=desc))
3532 
3533 
3534  class BreadCrumbTextView(EntityView):
diff --git a/wsgi/handler.py b/wsgi/handler.py
@@ -191,9 +191,13 @@
3535              content = self.appli.loggedout_content(req)
3536          else:
3537              content = self.appli.need_login_content(req)
3538          return WSGIResponse(code, req, content)
3539 
3540 +    # these are overridden by set_log_methods below
3541 +    # only defining here to prevent pylint from complaining
3542 +    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
3543 +
3544 
3545  from logging import getLogger
3546  from cubicweb import set_log_methods
3547  set_log_methods(CubicWebWSGIApplication, getLogger('cubicweb.wsgi'))