changeset 0:2a82b8aa4da7

Initial commit - add license - add run script - add config parser - add service object - add web_service - add workflow - add osv/orm/fields - add module management - add ir module
author ced@b2ck.com
date Mon, 10 Dec 2007 01:04:13 +0100
parents
children aff5b241b140
files LICENSE trytond.py trytond/__init__.py trytond/config.py trytond/init.sql trytond/ir/__init__.py trytond/ir/__terp__.py trytond/ir/actions.py trytond/ir/attachment.py trytond/ir/cron.py trytond/ir/exports.py trytond/ir/model.py trytond/ir/rule.py trytond/ir/sequence.py trytond/ir/translation.py trytond/ir/ui/__init__.py trytond/ir/ui/menu.py trytond/ir/ui/view.py trytond/ir/values.py trytond/module.py trytond/netsvc.py trytond/osv/__init__.py trytond/osv/fields.py trytond/osv/orm.py trytond/osv/osv.py trytond/pooler.py trytond/report/__init__.py trytond/res/__init__.py trytond/security.py trytond/sql_db.py trytond/tiny_socket.py trytond/tools/__init__.py trytond/tools/convert.py trytond/tools/misc.py trytond/tools/translate.py trytond/trytond.py trytond/version.py trytond/web_service/__init__.py trytond/web_service/common.py trytond/web_service/db.py trytond/web_service/object.py trytond/web_service/report.py trytond/web_service/wizard.py trytond/workflow/__init__.py trytond/workflow/instance.py trytond/workflow/wkf_expr.py trytond/workflow/wkf_service.py trytond/workflow/workitem.py
diffstat 46 files changed, 9648 insertions(+), 0 deletions(-) [+]
line wrap: on
line diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/LICENSE	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,340 @@
+		    GNU GENERAL PUBLIC LICENSE
+		       Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+                       51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+			    Preamble
+
+  The licenses for most software are designed to take away your
+freedom to share and change it.  By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users.  This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it.  (Some other Free Software Foundation software is covered by
+the GNU Library General Public License instead.)  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+  To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have.  You must make sure that they, too, receive or can get the
+source code.  And you must show them these terms so they know their
+rights.
+
+  We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+  Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software.  If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+  Finally, any free program is threatened constantly by software
+patents.  We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary.  To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+		    GNU GENERAL PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License.  The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language.  (Hereinafter, translation is included without limitation in
+the term "modification".)  Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope.  The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+  1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+  2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+    a) You must cause the modified files to carry prominent notices
+    stating that you changed the files and the date of any change.
+
+    b) You must cause any work that you distribute or publish, that in
+    whole or in part contains or is derived from the Program or any
+    part thereof, to be licensed as a whole at no charge to all third
+    parties under the terms of this License.
+
+    c) If the modified program normally reads commands interactively
+    when run, you must cause it, when started running for such
+    interactive use in the most ordinary way, to print or display an
+    announcement including an appropriate copyright notice and a
+    notice that there is no warranty (or else, saying that you provide
+    a warranty) and that users may redistribute the program under
+    these conditions, and telling the user how to view a copy of this
+    License.  (Exception: if the Program itself is interactive but
+    does not normally print such an announcement, your work based on
+    the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole.  If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works.  But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+  3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+    a) Accompany it with the complete corresponding machine-readable
+    source code, which must be distributed under the terms of Sections
+    1 and 2 above on a medium customarily used for software interchange; or,
+
+    b) Accompany it with a written offer, valid for at least three
+    years, to give any third party, for a charge no more than your
+    cost of physically performing source distribution, a complete
+    machine-readable copy of the corresponding source code, to be
+    distributed under the terms of Sections 1 and 2 above on a medium
+    customarily used for software interchange; or,
+
+    c) Accompany it with the information you received as to the offer
+    to distribute corresponding source code.  (This alternative is
+    allowed only for noncommercial distribution and only if you
+    received the program in object code or executable form with such
+    an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it.  For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable.  However, as a
+special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+  4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License.  Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+  5. You are not required to accept this License, since you have not
+signed it.  However, nothing else grants you permission to modify or
+distribute the Program or its derivative works.  These actions are
+prohibited by law if you do not accept this License.  Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+  6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions.  You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+  7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all.  For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices.  Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+  8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded.  In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+  9. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number.  If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation.  If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+  10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission.  For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this.  Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+			    NO WARRANTY
+
+  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+  12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+		     END OF TERMS AND CONDITIONS
+
+	    How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+    Gnomovision version 69, Copyright (C) year name of author
+    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary.  Here is a sample; alter the names:
+
+  Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+  `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+  <signature of Ty Coon>, 1 April 1989
+  Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs.  If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library.  If this is what you want to do, use the GNU Library General
+Public License instead of this License.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+import trytond
+import time
+trytond.TrytonServer().run()
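+# run() starts the service threads and returns; keep the main thread alive
+# so the daemon threads can keep serving.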
+while True:
+    time.sleep(1)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/__init__.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,1 @@
+from trytond import *
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/config.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,250 @@
+import ConfigParser, optparse, os, sys
+import logging
+from version import VERSION
+from netsvc import LOG_CRITICAL, LOG_ERROR, LOG_WARNING
+from netsvc import LOG_INFO, LOG_DEBUG
+
+
+class ConfigManager(object):
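+    """Gather the server options from the built-in defaults, the config
+    file and the command line, in increasing order of precedence."""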
+    def __init__(self, fname=None):
+        self.options = {
+            'verbose': False,
+            'interface': '',
+            'port': '8069',
+            'netinterface': '',
+            'netport': '8070',
+            'db_host': False,
+            'db_port': False,
+            'db_name': False,
+            'db_user': False,
+            'db_password': False,
+            'db_maxconn': 64,
+            'reportgz': False,
+            'netrpc': True,
+            'xmlrpc': True,
+            'soap': False,
+            'pg_path': None,
+            'admin_passwd': 'admin',
+            'debug_mode': False,
+            'pidfile': None,
+            'logfile': None,
+            'secure': False,
+            'smtp_server': 'localhost',
+            'smtp_user': False,
+            'smtp_password': False,
+            'stop_after_init': False,
+            'price_accuracy': 2,
+            'assert_exit_level': logging.WARNING,
+        }
+
+        assert_exit_levels = (
+                LOG_CRITICAL,
+                LOG_ERROR,
+                LOG_WARNING,
+                LOG_INFO,
+                LOG_DEBUG,
+                )
+
+        parser = optparse.OptionParser(version=VERSION)
+
+        parser.add_option("-c", "--config", dest="config",
+                help="specify alternate config file")
+        parser.add_option("-s", "--save", action="store_true", dest="save",
+                default=False, help="save configuration to ~/.terp_serverrc")
+        parser.add_option("-v", "--verbose", action="store_true",
+                dest="verbose", default=False, help="enable debugging")
+        parser.add_option("--pidfile", dest="pidfile",
+                help="file where the server pid will be stored")
+        parser.add_option("--logfile", dest="logfile",
+                help="file where the server log will be stored")
+        parser.add_option("-n", "--interface", dest="interface",
+                help="specify the TCP IP address")
+        parser.add_option("-p", "--port", dest="port",
+                help="specify the TCP port")
+        parser.add_option("--net_interface", dest="netinterface",
+                help="specify the TCP IP address for netrpc")
+        parser.add_option("--net_port", dest="netport",
+                help="specify the TCP port for netrpc")
+        parser.add_option("--no-netrpc", dest="netrpc", action="store_false",
+                default=True, help="disable netrpc")
+        parser.add_option("--no-xmlrpc", dest="xmlrpc", action="store_false",
+                default=True, help="disable xmlrpc")
+        parser.add_option("-i", "--init", dest="init",
+                help="init a module (use \"all\" for all modules)")
+        parser.add_option("--without-demo", dest="without_demo",
+                help="disable loading demo data for a module " \
+                        "(use \"all\" for all modules)", default=False)
+        parser.add_option("-u", "--update", dest="update",
+                help="update a module (use \"all\" for all modules)")
+        parser.add_option("--stop-after-init", action="store_true",
+                dest="stop_after_init", default=False,
+                help="stop the server after it initializes")
+        parser.add_option('--debug', dest='debug_mode', action='store_true',
+                default=False, help='enable debug mode')
+        parser.add_option("--assert-exit-level", dest='assert_exit_level',
+                help="specify the level at which a failed assertion will " \
+                        "stop the server " + str(assert_exit_levels))
+        parser.add_option("-S", "--secure", dest="secure", action="store_true",
+                help="launch server over https instead of http", default=False)
+        parser.add_option('--smtp', dest='smtp_server', default='',
+                help='specify the SMTP server for sending email')
+        parser.add_option('--smtp-user', dest='smtp_user', default='',
+                help='specify the SMTP username for sending email')
+        parser.add_option('--smtp-password', dest='smtp_password', default='',
+                help='specify the SMTP password for sending email')
+        parser.add_option('--price_accuracy', dest='price_accuracy',
+                default='2', help='specify the price accuracy')
+
+        group = optparse.OptionGroup(parser, "Database related options")
+        group.add_option("-d", "--database", dest="db_name",
+                help="specify the database name")
+        group.add_option("-r", "--db_user", dest="db_user",
+                help="specify the database user name")
+        group.add_option("-w", "--db_password", dest="db_password",
+                help="specify the database password")
+        group.add_option("--pg_path", dest="pg_path",
+                help="specify the pg executable path")
+        group.add_option("--db_host", dest="db_host",
+                help="specify the database host")
+        group.add_option("--db_port", dest="db_port",
+                help="specify the database port")
+        group.add_option("--db_maxconn", dest="db_maxconn", default='64',
+                help="specify the maximum number of physical " \
+                        "connections to PostgreSQL")
+        parser.add_option_group(group)
+
+        (opt, args) = parser.parse_args()
+
+        # On Win32, place/search the config file near the server installation
+        # (../etc from the server).
+        # If the server is run by an unprivileged user, he has to specify the
+        # location of a config file where he has write permission, otherwise
+        # he won't be able to save the configuration, or even start the server.
+        if os.name == 'nt':
+            rcfilepath = os.path.join(os.path.abspath(
+                os.path.dirname(sys.argv[0])), 'tinyerp-server.conf')
+        else:
+            rcfilepath = os.path.expanduser('~/.terp_serverrc')
+
+        self.rcfile = fname or opt.config \
+                or os.environ.get('TERP_SERVER') or rcfilepath
+        self.load()
+
+        # Check whether logging is enabled; if not, output goes to stdout
+        if self.options['logfile'] in ('None', 'False'):
+            self.options['logfile'] = False
+        # the same for the pidfile
+        if self.options['pidfile'] in ('None', 'False'):
+            self.options['pidfile'] = False
+
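+        # options given on the command line override the config file values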
+        for arg in (
+                'interface',
+                'port',
+                'db_name',
+                'db_user',
+                'db_password',
+                'db_host',
+                'db_port',
+                'logfile',
+                'pidfile',
+                'secure',
+                'smtp_server',
+                'smtp_user',
+                'smtp_password',
+                'price_accuracy',
+                'netinterface',
+                'netport',
+                'db_maxconn',
+                ):
+            if getattr(opt, arg):
+                self.options[arg] = getattr(opt, arg)
+
+        for arg in (
+                'verbose',
+                'debug_mode',
+                'stop_after_init',
+                'without_demo',
+                'netrpc',
+                'xmlrpc',
+                ):
+            self.options[arg] = getattr(opt, arg)
+
+        if opt.assert_exit_level:
+            assert opt.assert_exit_level in assert_exit_levels, \
+                    'ERROR: The assert-exit-level must be one ' \
+                    'of these values: ' + str(assert_exit_levels)
+            self.options['assert_exit_level'] = getattr(logging,
+                    opt.assert_exit_level.upper())
+
+        init = {}
+        if opt.init:
+            for i in opt.init.split(','):
+                init[i] = 1
+        self.options['init'] = init
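+        # demo data follows the list of modules being initialized,
+        # unless --without-demo is given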
+        self.options["demo"] = not opt.without_demo \
+                and self.options['init'] or {}
+
+        update = {}
+        if opt.update:
+            for i in opt.update.split(','):
+                update[i] = 1
+        self.options['update'] = update
+
+        if opt.pg_path:
+            self.options['pg_path'] = opt.pg_path
+
+        if opt.save:
+            self.save()
+
+    def load(self):
+        parser = ConfigParser.ConfigParser()
+        try:
+            parser.read([self.rcfile])
+            for (name, value) in parser.items('options'):
+                if value == 'True' or value == 'true':
+                    value = True
+                if value == 'False' or value == 'false':
+                    value = False
+                self.options[name] = value
+        except IOError:
+            pass
+        except ConfigParser.NoSectionError:
+            pass
+
+    def save(self):
+        parser = ConfigParser.ConfigParser()
+        parser.add_section('options')
+        for opt in [opt for opt in self.options.keys() \
+                if opt not in (
+                    'version',
+                    'init',
+                    'update',
+                    )]:
+            parser.set('options', opt, self.options[opt])
+
+        # try to create the directories and write the file
+        try:
+            if not os.path.exists(os.path.dirname(self.rcfile)):
+                os.makedirs(os.path.dirname(self.rcfile))
+            try:
+                parser.write(file(self.rcfile, 'w'))
+            except IOError:
+                sys.stderr.write("ERROR: couldn't write the config file\n")
+
+        except OSError:
+            # nothing more we can do if the directory cannot be created
+            sys.stderr.write("ERROR: couldn't create the config directory\n")
+
+    def get(self, key, default=None):
+        return self.options.get(key, default)
+
+    def __setitem__(self, key, value):
+        self.options[key] = value
+
+    def __getitem__(self, key):
+        return self.options[key]
+
+CONFIG = ConfigManager()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/init.sql	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,323 @@
+-------------------------------------------------------------------------
+-- Pure SQL
+-------------------------------------------------------------------------
+
+CREATE TABLE inherit (
+    obj_type varchar(128) not null,
+    obj_id int not null,
+    inst_type varchar(128) not null,
+    inst_id int not null
+);
+
+-------------------------------------------------------------------------
+-- IR dictionary
+-------------------------------------------------------------------------
+
+create table ir_values
+(
+    id serial,
+    name varchar(128) not null,
+    key varchar(128) not null,
+    key2 varchar(128) not null,
+    model varchar(128) not null,
+    value text,
+    meta text default NULL,
+    res_id integer default null,
+    primary key (id)
+);
+
+-------------------------------------------------------------------------
+-- Modules Description
+-------------------------------------------------------------------------
+
+CREATE TABLE ir_model (
+  id serial,
+  model varchar(64) DEFAULT ''::varchar NOT NULL,
+  name varchar(64),
+  info text,
+  primary key(id)
+);
+
+CREATE TABLE ir_model_fields (
+  id serial,
+  model varchar(64) DEFAULT ''::varchar NOT NULL,
+  model_id int references ir_model,
+  name varchar(64) DEFAULT ''::varchar NOT NULL,
+  relation varchar(64),
+  field_description varchar(256),
+  ttype varchar(64),
+  group_name varchar(64),
+  view_load boolean,
+  relate boolean default False,
+  primary key(id)
+);
+
+
+-------------------------------------------------------------------------
+-- Actions
+-------------------------------------------------------------------------
+
+CREATE TABLE ir_actions (
+    id serial NOT NULL,
+    name varchar(64) DEFAULT ''::varchar NOT NULL,
+    "type" varchar(64) DEFAULT 'window'::varchar NOT NULL,
+    usage varchar(32) DEFAULT null,
+    primary key(id)
+);
+
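+-- Each action subtype below inherits the common ir_actions columns through
+-- PostgreSQL table inheritance; the ORM reuses ir_actions_id_seq for ids.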
+CREATE TABLE ir_act_window (
+    view_id integer,
+    res_model varchar(64),
+    view_type varchar(16),
+    "domain" varchar(127),
+    primary key(id)
+)
+INHERITS (ir_actions);
+
+CREATE TABLE ir_act_report_xml (
+    model varchar(64) NOT NULL,
+    report_name varchar(64) NOT NULL,
+    report_xsl varchar(64),
+    report_xml varchar(64),
+    auto boolean default true,
+    primary key(id)
+)
+INHERITS (ir_actions);
+
+create table ir_act_report_custom (
+    report_id int,
+--  report_id int references ir_report_custom
+    primary key(id)
+)
+INHERITS (ir_actions);
+
+CREATE TABLE ir_act_group (
+    exec_type varchar(64) DEFAULT 'serial'::varchar NOT NULL,
+    primary key(id)
+)
+INHERITS (ir_actions);
+
+CREATE TABLE ir_act_group_link (
+    aid integer NOT NULL,
+    gid integer NOT NULL
+);
+
+CREATE TABLE ir_act_execute (
+    func_name varchar(64) NOT NULL,
+    func_arg varchar(64),
+    primary key(id)
+)
+INHERITS (ir_actions);
+
+CREATE TABLE ir_act_wizard (
+    wiz_name varchar(64) NOT NULL,
+    primary key(id)
+)
+INHERITS (ir_actions);
+
+CREATE TABLE ir_act_url (
+    url text NOT NULL,
+    target varchar(64) NOT NULL,
+    primary key(id)
+)
+INHERITS (ir_actions);
+
+CREATE TABLE ir_ui_view (
+	id serial NOT NULL,
+	name varchar(64) DEFAULT ''::varchar NOT NULL,
+	model varchar(64) DEFAULT ''::varchar NOT NULL,
+	"type" varchar(64) DEFAULT 'form'::varchar NOT NULL,
+	arch text NOT NULL,
+	field_parent varchar(64),
+	priority integer DEFAULT 5 NOT NULL,
+	primary key(id)
+);
+
+CREATE TABLE ir_ui_menu (
+	id serial NOT NULL,
+	parent_id int references ir_ui_menu on delete set null,
+	name varchar(64) DEFAULT ''::varchar NOT NULL,
+	icon varchar(64) DEFAULT ''::varchar,
+	primary key (id)
+);
+
+select setval('ir_ui_menu_id_seq', 2);
+
+---------------------------------
+-- Res users
+---------------------------------
+
+-- level:
+--   0  RESTRICT TO USER
+--   1  RESTRICT TO GROUP
+--   2  PUBLIC
+
+CREATE TABLE res_users (
+    id serial NOT NULL,
+    name varchar(64) not null,
+    active boolean default True,
+    login varchar(64) NOT NULL UNIQUE,
+    password varchar(32) default null,
+--  action_id int references ir_act_window on delete set null,
+    action_id int,
+    primary key(id)
+);
+alter table res_users add constraint res_users_login_uniq unique (login);
+
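+-- Create the root account (id 1): inactive and without a password until setup.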
+insert into res_users (id,login,password,name,action_id,active) values (1,'root',NULL,'Root',NULL,False);
+select setval('res_users_id_seq', 2);
+
+CREATE TABLE res_groups (
+    id serial NOT NULL,
+    name varchar(32) NOT NULL,
+    primary key(id)
+);
+
+create table res_roles (
+    id serial NOT NULL,
+    parent_id int references res_roles on delete set null,
+    name varchar(32) NOT NULL,
+    primary key(id)
+);
+
+CREATE TABLE res_roles_users_rel (
+	uid integer NOT NULL references res_users on delete cascade,
+	rid integer NOT NULL references res_roles on delete cascade
+);
+
+CREATE TABLE res_groups_users_rel (
+	uid integer NOT NULL references res_users on delete cascade,
+	gid integer NOT NULL references res_groups on delete cascade
+);
+
+---------------------------------
+-- Workflows
+---------------------------------
+
+create table wkf
+(
+    id serial,
+    name varchar(64),
+    osv varchar(64),
+    on_create bool default False,
+    primary key(id)
+);
+
+create table wkf_activity
+(
+    id serial,
+    wkf_id int references wkf on delete cascade,
+    subflow_id int references wkf on delete set null,
+    split_mode varchar(3) default 'XOR',
+    join_mode varchar(3) default 'XOR',
+    kind varchar(16) not null default 'dummy',
+    name varchar(64),
+    signal_send varchar(32) default null,
+    flow_start boolean default False,
+    flow_stop boolean default False,
+    action varchar(64) default null,
+    primary key(id)
+);
+
+create table wkf_transition
+(
+    id serial,
+    act_from int references wkf_activity on delete cascade,
+    act_to int references wkf_activity on delete cascade,
+    condition varchar(128) default NULL,
+
+    trigger_type varchar(128) default NULL,
+    trigger_expr_id varchar(128) default NULL,
+
+    signal varchar(64) default null,
+    role_id int references res_roles on delete set null,
+
+    primary key(id)
+);
+
+create table wkf_instance
+(
+    id serial,
+    wkf_id int references wkf on delete set null,
+    uid int default null,
+    res_id int not null,
+    res_type varchar(64) not null,
+    state varchar(32) not null default 'active',
+    primary key(id)
+);
+
+create table wkf_workitem
+(
+    id serial,
+    act_id int not null references wkf_activity on delete cascade,
+    inst_id int not null references wkf_instance on delete cascade,
+    subflow_id int references wkf_instance on delete cascade,
+    state varchar(64) default 'blocked',
+    primary key(id)
+);
+
+create table wkf_witm_trans
+(
+    trans_id int not null references wkf_transition on delete cascade,
+    inst_id int not null references wkf_instance on delete cascade
+);
+
+create table wkf_logs
+(
+    id serial,
+    res_type varchar(128) not null,
+    res_id int not null,
+    uid int references res_users on delete set null,
+    act_id int references wkf_activity on delete set null,
+    time time not null,
+    info varchar(128) default NULL,
+    primary key(id)
+);
+
+---------------------------------
+-- Modules
+---------------------------------
+
+CREATE TABLE ir_module_category (
+    id serial NOT NULL,
+    create_uid integer references res_users on delete set null,
+    create_date timestamp without time zone,
+    write_date timestamp without time zone,
+    write_uid integer references res_users on delete set null,
+    parent_id integer REFERENCES ir_module_category ON DELETE SET NULL,
+    name character varying(128) NOT NULL,
+    primary key(id)
+);
+
+
+CREATE TABLE ir_module_module (
+    id serial NOT NULL,
+    create_uid integer references res_users on delete set null,
+    create_date timestamp without time zone,
+    write_date timestamp without time zone,
+    write_uid integer references res_users on delete set null,
+    website character varying(256),
+    name character varying(128) NOT NULL,
+    author character varying(128),
+    url character varying(128),
+    state character varying(16),
+    latest_version character varying(64),
+    shortdesc character varying(256),
+    category_id integer REFERENCES ir_module_category ON DELETE SET NULL,
+    description text,
+    demo boolean default False,
+    primary key(id)
+);
+ALTER TABLE ir_module_module add constraint name_uniq unique (name);
+
+CREATE TABLE ir_module_module_dependency (
+    id serial NOT NULL,
+    create_uid integer references res_users on delete set null,
+    create_date timestamp without time zone,
+    write_date timestamp without time zone,
+    write_uid integer references res_users on delete set null,
+    name character varying(128),
+    version_pattern character varying(128) default NULL,
+    module_id integer REFERENCES ir_module_module ON DELETE cascade,
+    primary key(id)
+);
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/ir/__init__.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,10 @@
+from sequence import *
+import ui
+from actions import *
+from model import *
+from attachment import *
+from cron import *
+from values import *
+from translation import *
+from exports import *
+from rule import *
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/ir/__terp__.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,4 @@
+{
+    'name': 'IR',
+    'active': True,
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/ir/actions.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,279 @@
+"Actions"
+from trytond.osv import fields, OSV
+from trytond.tools import file_open
+
+
+class Actions(OSV):
+    "Actions"
+    _name = 'ir.actions.actions'
+    _table = 'ir_actions'
+    _description = __doc__
+    _columns = {
+        'name': fields.char('Action Name', required=True, size=64),
+        'type': fields.char('Action Type', required=True, size=32),
+        'usage': fields.char('Action Usage', size=32)
+    }
+    _defaults = {
+        'usage': lambda *a: False,
+    }
+
+Actions()
+
+
+class ActionsExecute(OSV):
+    "Actions execute"
+    _name = 'ir.actions.execute'
+    _table = 'ir_act_execute'
+    _sequence = 'ir_actions_id_seq'
+    _description = __doc__
+    _columns = {
+        'name': fields.char('Name', size=64, required=True, translate=True),
+        'type': fields.char('Type', size=32, required=True),
+        'func_name': fields.char('Function Name', size=64, required=True),
+        'func_arg': fields.char('Function Argument', size=64),
+        'usage': fields.char('Action Usage', size=32)
+    }
+
+ActionsExecute()
+
+
+class ActionsGroup(OSV):
+    "Actions group"
+    _name = 'ir.actions.group'
+    _table = 'ir_act_group'
+    _sequence = 'ir_actions_id_seq'
+    _description = __doc__
+    _columns = {
+        'name': fields.char('Group Name', size=64, required=True),
+        'type': fields.char('Action Type', size=32, required=True),
+        'exec_type': fields.char('Execution sequence', size=64, required=True),
+        'usage': fields.char('Action Usage', size=32)
+    }
+
+ActionsGroup()
+
+
+class ActionsReportCustom(OSV):
+    "Actions report custom"
+    _name = 'ir.actions.report.custom'
+    _table = 'ir_act_report_custom'
+    _sequence = 'ir_actions_id_seq'
+    _description = __doc__
+    _columns = {
+        'name': fields.char('Report Name', size=64, required=True,
+            translate=True),
+        'type': fields.char('Report Type', size=32, required=True),
+        'model': fields.char('Model', size=64, required=True),
+        'report_id': fields.integer('Report Ref.', required=True),
+        'usage': fields.char('Action Usage', size=32),
+        'multi': fields.boolean('On multiple doc.',
+            help="If set to true, the action will not be displayed " \
+                    "on the right toolbar of a form view.")
+    }
+    _defaults = {
+        'multi': lambda *a: False,
+    }
+
+ActionsReportCustom()
+
+
+class ActionsReportXML(OSV):
+    "Actions report xml"
+
+    def _report_content(self, cursor, user, ids, name, arg, context=None):
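+        # Return the stored binary content; when it is empty, fall back to
+        # reading the file named by the matching path field (the field name
+        # minus '_content').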
+        res = {}
+        for report in self.browse(cursor, user, ids, context=context):
+            data = report[name + '_data']
+            if not data and report[name[:-8]]:
+                try:
+                    data = file_open(report[name[:-8]], mode='rb').read()
+                except Exception:
+                    data = False
+            res[report.id] = data
+        return res
+
+    def _report_content_inv(self, cursor, user, obj_id, name, value, arg,
+            context=None):
+        self.write(cursor, user, obj_id, {name+'_data': value}, context=context)
+
+    def _report_sxw(self, cursor, user, ids, name, arg, context=None):
+        res = {}
+        for report in self.browse(cursor, user, ids, context=context):
+            if report.report_rml:
+                res[report.id] = report.report_rml.replace('.rml', '.sxw')
+            else:
+                res[report.id] = False
+        return res
+
+    _name = 'ir.actions.report.xml'
+    _table = 'ir_act_report_xml'
+    _sequence = 'ir_actions_id_seq'
+    _description = __doc__
+    _columns = {
+        'name': fields.char('Name', size=64, required=True, translate=True),
+        'type': fields.char('Report Type', size=32, required=True),
+        'model': fields.char('Model', size=64, required=True),
+        'report_name': fields.char('Internal Name', size=64, required=True),
+        'report_xsl': fields.char('XSL path', size=256),
+        'report_xml': fields.char('XML path', size=256),
+        'report_rml': fields.char('RML path', size=256,
+            help="The path of the .rml file, or NULL \n" \
+                    "if the content is stored in report_rml_content"),
+        'report_sxw': fields.function(_report_sxw, method=True, type='char',
+            string='SXW path'),
+        'report_sxw_content_data': fields.binary('SXW content'),
+        'report_rml_content_data': fields.binary('RML content'),
+        'report_sxw_content': fields.function(_report_content,
+            fnct_inv=_report_content_inv, method=True,
+            type='binary', string='SXW content',),
+        'report_rml_content': fields.function(_report_content,
+            fnct_inv=_report_content_inv, method=True,
+            type='binary', string='RML content'),
+        'auto': fields.boolean('Automatic XSL:RML', required=True),
+        'usage': fields.char('Action Usage', size=32),
+        'header': fields.boolean('Add RML header',
+            help="Whether to add the corporate RML header"),
+        'multi': fields.boolean('On multiple doc.',
+            help="If set to true, the action will not be displayed " \
+                    "on the right toolbar of a form views.")
+    }
+    _defaults = {
+        'type': lambda *a: 'ir.actions.report.xml',
+        'multi': lambda *a: False,
+        'auto': lambda *a: True,
+        'header': lambda *a: True,
+        'report_sxw_content': lambda *a: False,
+    }
+
+ActionsReportXML()
+
+
+class ActionsActWindow(OSV):
+    "Actions act window"
+    _name = 'ir.actions.act_window'
+    _table = 'ir_act_window'
+    _sequence = 'ir_actions_id_seq'
+    _description = __doc__
+
+    def _views_get_fnc(self, cursor, user, ids, name, arg, context=None):
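+        # Build the (view id, view mode) pairs of each action: use the
+        # explicit view_ids when set, otherwise derive the list from
+        # view_mode, pairing view_id with its own view type once.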
+        res = {}
+        for act in self.browse(cursor, user, ids, context=context):
+            res[act.id] = [(view.view_id.id, view.view_mode) \
+                    for view in act.view_ids]
+            if (not act.view_ids):
+                modes = act.view_mode.split(',')
+                find = False
+                if act.view_id.id:
+                    res[act.id].append((act.view_id.id, act.view_id.type))
+                for mode in modes:
+                    if act.view_id and (mode == act.view_id.type) and not find:
+                        find = True
+                        continue
+                    res[act.id].append((False, mode))
+        return res
+
+    _columns = {
+        'name': fields.char('Action Name', size=64, translate=True),
+        'type': fields.char('Action Type', size=32, required=True),
+        'view_id': fields.many2one('ir.ui.view', 'View Ref.',
+            ondelete='cascade'),
+        'domain': fields.char('Domain Value', size=250),
+        'context': fields.char('Context Value', size=250),
+        'res_model': fields.char('Model', size=64),
+        'src_model': fields.char('Source model', size=64),
+        'view_type': fields.selection([('tree','Tree'), ('form','Form')],
+            string='Type of view'),
+        'view_mode': fields.char('Mode of view', size=250),
+        'usage': fields.char('Action Usage', size=32),
+        'view_ids': fields.one2many('ir.actions.act_window.view',
+            'act_window_id', 'Views'),
+        'views': fields.function(_views_get_fnc, method=True, type='binary',
+            string='Views'),
+        'limit': fields.integer('Limit',
+            help='Default limit for the list view'),
+        'auto_refresh': fields.integer('Auto-Refresh',
+            help='Add an auto-refresh on the view'),
+    }
+    _defaults = {
+        'type': lambda *a: 'ir.actions.act_window',
+        'view_type': lambda *a: 'form',
+        'view_mode': lambda *a: 'tree,form',
+        'context': lambda *a: '{}',
+        'limit': lambda *a: 80,
+        'auto_refresh': lambda *a: 0,
+    }
+
+ActionsActWindow()
+
+
+class ActionsActWindowView(OSV):
+    "Actions act window view"
+    _name = 'ir.actions.act_window.view'
+    _table = 'ir_act_window_view'
+    _rec_name = 'view_id'
+    _description = __doc__
+    _columns = {
+        'sequence': fields.integer('Sequence'),
+        'view_id': fields.many2one('ir.ui.view', 'View'),
+        'view_mode': fields.selection((
+            ('tree', 'Tree'),
+            ('form', 'Form'),
+            ('graph', 'Graph'),
+            ('calendar', 'Calendar')), string='Type of view', required=True),
+        'act_window_id': fields.many2one('ir.actions.act_window', 'Action'),
+        'multi': fields.boolean('On multiple doc.',
+            help="If set to true, the action will not be displayed \n" \
+                    "on the right toolbar of a form view."),
+    }
+    _defaults = {
+        'multi': lambda *a: False,
+    }
+    _order = 'sequence'
+
+ActionsActWindowView()
+
+
+class ActionsWizard(OSV):
+    "Actions wizard"
+    _name = 'ir.actions.wizard'
+    _table = 'ir_act_wizard'
+    _sequence = 'ir_actions_id_seq'
+    _description = __doc__
+    _columns = {
+        'name': fields.char('Wizard info', size=64, required=True,
+            translate=True),
+        'type': fields.char('Action type', size=32, required=True),
+        'wiz_name': fields.char('Wizard name', size=64, required=True),
+        'multi': fields.boolean('Action on multiple doc.',
+            help="If set to true, the wizard will not be displayed \n" \
+                    "on the right toolbar of a form view.")
+    }
+    _defaults = {
+        'type': lambda *a: 'ir.actions.wizard',
+        'multi': lambda *a: False,
+    }
+
+ActionsWizard()
+
+
+class ActionsURL(OSV):
+    "Actions URL"
+    _name = 'ir.actions.url'
+    _table = 'ir_act_url'
+    _sequence = 'ir_actions_id_seq'
+    _description = __doc__
+    _columns = {
+        'name': fields.char('Action Name', size=64, translate=True),
+        'type': fields.char('Action Type', size=32, required=True),
+        'url': fields.text('Action URL', required=True),
+        'target': fields.selection([
+            ('new', 'New Window'),
+            ('self', 'This Window'),
+            ], 'Action Target', required=True)
+    }
+    _defaults = {
+        'type': lambda *a: 'ir.actions.act_url',
+        'target': lambda *a: 'new',
+    }
+
+ActionsURL()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/ir/attachment.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,19 @@
+"Attachment"
+from trytond.osv import fields, OSV
+
+class Attachment(OSV):
+    "Attachment"
+    _name = 'ir.attachment'
+    _description = __doc__
+    _columns = {
+        'name': fields.char('Attachment Name', size=64, required=True),
+        'datas': fields.binary('Data'),
+        'datas_fname': fields.char('Data Filename', size=64),
+        'description': fields.text('Description'),
+        # Not required because of the document module!
+        'res_model': fields.char('Resource Model', size=64, readonly=True),
+        'res_id': fields.integer('Resource ID', readonly=True),
+        'link': fields.char('Link', size=256)
+    }
+
+Attachment()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/ir/cron.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,118 @@
+"""
+SPEC: Execute "model.function(*eval(args))" periodically
+   date        : date to execute the job, or NULL to execute immediately
+   delete_after: delete the ir.cron entry after execution
+   interval_*  : period
+   max_repeat  : number of executions, or NULL to repeat endlessly
+"""
+
+from mx import DateTime
+import time
+from trytond import pooler
+from trytond.osv import fields, OSV
+from trytond.netsvc import Agent
+
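+# Delay in seconds before the next poll of the cron table.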
+NEXT_WAIT = 60
+
+_INTERVALTYPES = {
+    'work_days': lambda interval: DateTime.RelativeDateTime(days=interval),
+    'days': lambda interval: DateTime.RelativeDateTime(days=interval),
+    'hours': lambda interval: DateTime.RelativeDateTime(hours=interval),
+    'weeks': lambda interval: DateTime.RelativeDateTime(days=7*interval),
+    'months': lambda interval: DateTime.RelativeDateTime(months=interval),
+    'minutes': lambda interval: DateTime.RelativeDateTime(minutes=interval),
+}
+
+class Cron(OSV, Agent):
+    "Cron"
+    _name = "ir.cron"
+    _description = __doc__
+    _columns = {
+        'name': fields.char('Name', size=60, required=True),
+        'user_id': fields.many2one('res.users', 'User', required=True),
+        'active': fields.boolean('Active'),
+        'interval_number': fields.integer('Interval Number'),
+        'interval_type': fields.selection( [
+            ('minutes', 'Minutes'),
+            ('hours', 'Hours'),
+            ('days', 'Days'),
+            ('weeks', 'Weeks'),
+            ('months', 'Months'),
+            ], 'Interval Unit'),
+        'numbercall': fields.integer('Number of calls',
+            help='Number of times the function is called;\n' \
+                    'a negative number indicates that the function ' \
+                    'will always be called'),
+        'doall' : fields.boolean('Repeat missed'),
+        'nextcall' : fields.datetime('Next call date', required=True),
+        'model': fields.char('Model', size=64),
+        'function': fields.char('Function', size=64),
+        'args': fields.text('Arguments'),
+        'priority': fields.integer('Priority',
+            help='0=Very Urgent\n10=Not urgent')
+    }
+
+    _defaults = {
+        'nextcall' : lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
+        'priority' : lambda *a: 5,
+        'user_id' : lambda obj,cursor,user,context: user,
+        'interval_number' : lambda *a: 1,
+        'interval_type' : lambda *a: 'months',
+        'numbercall' : lambda *a: 1,
+        'active' : lambda *a: 1,
+        'doall' : lambda *a: 1
+    }
+
+    def _callback(self, cursor, user, model, func, args):
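+        # 'args' holds the text of a Python expression; evaluate it to get
+        # the positional arguments of the method call.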
+        args = (args or []) and eval(args)
+        obj = self.pool.get(model)
+        if obj and hasattr(obj, func):
+            fct = getattr(obj, func)
+            fct(cursor, user, *args)
+
+    def pool_jobs(self, db_name, check=False):
+        #TODO Error treatment: exception, request, ... -> send request to user
+        now = DateTime.now()
+        try:
+            cursor = pooler.get_db(db_name).cursor()
+        except Exception:
+            return False
+
+        try:
+            cursor.execute('SELECT * FROM ir_cron ' \
+                    'WHERE numbercall <> 0 ' \
+                        'AND active ' \
+                        'AND nextcall <= now() ' \
+                        'ORDER BY priority')
+            for job in cursor.dictfetchall():
+                nextcall = DateTime.strptime(job['nextcall'],
+                        '%Y-%m-%d %H:%M:%S')
+                numbercall = job['numbercall']
+                done = False
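+                # Catch up on the calls missed since nextcall: run each of
+                # them when 'doall' is set, otherwise run the job only once.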
+                while nextcall < now and numbercall:
+                    if numbercall > 0:
+                        numbercall -= 1
+                    if not done or job['doall']:
+                        self._callback(cursor, job['user_id'], job['model'],
+                                job['function'], job['args'])
+                    if numbercall:
+                        nextcall += _INTERVALTYPES[job['interval_type']](
+                                job['interval_number'])
+                    done = True
+                addsql = ''
+                if not numbercall:
+                    addsql = ', active=False'
+                cursor.execute("UPDATE ir_cron SET nextcall = %s, " \
+                            "numbercall = %d" + addsql + " " \
+                            "WHERE id = %d",
+                            (nextcall.strftime('%Y-%m-%d %H:%M:%S'),
+                                numbercall, job['id']))
+                cursor.commit()
+        finally:
+            cursor.close()
+        #TODO improve to run at min(min(nextcalls), time() + NEXT_WAIT)
+        if not check:
+            self.set_alarm(self.pool_jobs, int(time.time()) + NEXT_WAIT,
+                    [db_name])
+
+Cron()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/ir/exports.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,29 @@
+"Exports"
+from trytond.osv import fields, OSV
+
+
+class Exports(OSV):
+    "Exports"
+    _name = "ir.exports"
+    _description = __doc__
+    _columns = {
+            'name': fields.char('Export name', size=128),
+            'resource': fields.char('Resource', size=128),
+            'export_fields': fields.one2many('ir.exports.line', 'export_id',
+                                             'Export Fields'),
+    }
+
+Exports()
+
+
+class ExportsLine(OSV):
+    "Exports line"
+    _name = 'ir.exports.line'
+    _description = __doc__
+    _columns = {
+            'name': fields.char('Field name', size=64),
+            'export_id': fields.many2one('ir.exports', 'Exportation',
+                select=True),
+            }
+
+ExportsLine()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/ir/model.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,359 @@
+"model"
+from trytond.osv import fields, OSV
+from trytond.netsvc import Logger, LocalService, LOG_ERROR, LOG_INFO
+from trytond.osv.orm import except_orm
+from trytond.tools import Cache
+import time
+
+class Model(OSV):
+    "Model"
+    _name = 'ir.model'
+    _rec_name = 'model'
+    _description = __doc__
+    _columns = {
+        'name': fields.char('Model name', size=64, translate=True),
+        'model': fields.char('Object name', size=64, required=True),
+        'info': fields.text('Information'),
+        'field_id': fields.one2many('ir.model.fields', 'model_id', 'Fields',
+            required=True),
+    }
+    _defaults = {
+        'name': lambda *a: 'No Name',
+    }
+
+Model()
+
+class ModelFields(OSV):
+    "Model fields"
+    _name = 'ir.model.fields'
+    _description = __doc__
+    _columns = {
+        'name': fields.char('Name', size=64),
+        'model': fields.char('Model Name', size=64, required=True),
+        'relation': fields.char('Model Relation', size=64),
+        'model_id': fields.many2one('ir.model', 'Model id', required=True,
+            select=True),
+        'field_description': fields.char('Field Description', size=256),
+        'ttype': fields.char('Field Type', size=64),
+        'relate': fields.boolean('Click and Relate'),
+
+        'groups': fields.many2many('res.groups', 'ir_model_fields_group_rel',
+            'field_id', 'group_id', 'Groups'),
+        'group_name': fields.char('Group Name', size=128),
+        'view_load': fields.boolean('View Auto-Load'),
+    }
+    _defaults = {
+        'relate': lambda *a: 0,
+        'view_load': lambda *a: 0,
+        'name': lambda *a: 'No Name',
+        'field_description': lambda *a: 'No description available',
+    }
+    _order = "id"
+
+ModelFields()
+
+
+class ModelAccess(OSV):
+    "Model access"
+    _name = 'ir.model.access'
+    _description = __doc__
+    _columns = {
+        'name': fields.char('Name', size=64, required=True),
+        'model_id': fields.many2one('ir.model', 'Model', required=True),
+        'group_id': fields.many2one('res.groups', 'Group'),
+        'perm_read': fields.boolean('Read Access'),
+        'perm_write': fields.boolean('Write Access'),
+        'perm_create': fields.boolean('Create Access'),
+        'perm_unlink': fields.boolean('Delete Access'),
+    }
+
+    def check(self, cursor, user, model_name, mode='read',
+            raise_exception=True):
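+        # Access is granted if any of the user's groups has perm_<mode> on
+        # the model; when no group rule matches, fall back to the global
+        # rules (group_id IS NULL); when none exist either, allow by default.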
+        assert mode in ['read', 'write', 'create', 'unlink'], \
+                'Invalid access mode for security'
+        if user == 1:
+            return True
+        cursor.execute('SELECT MAX(CASE WHEN perm_'+mode+' THEN 1 else 0 END) '
+            'FROM ir_model_access a '
+                'JOIN ir_model m '
+                    'ON (a.model_id=m.id) '
+                'JOIN res_groups_users_rel gu '
+                    'ON (gu.gid = a.group_id) '
+            'WHERE m.model = %s AND gu.user = %s', (model_name, user,))
+        row = cursor.fetchall()
+        if row[0][0] is None:
+            cursor.execute('SELECT ' \
+                        'MAX(CASE WHEN perm_' + mode + ' THEN 1 else 0 END) ' \
+                    'FROM ir_model_access a ' \
+                    'JOIN ir_model m ' \
+                        'ON (a.model_id = m.id) ' \
+                    'WHERE a.group_id IS NULL AND m.model = %s', (model_name,))
+            row = cursor.fetchall()
+            if row[0][0] is None:
+                return True
+
+        if not row[0][0]:
+            if raise_exception:
+                if mode == 'read':
+                    raise except_orm('AccessError',
+                            'You cannot read this document!')
+                elif mode == 'write':
+                    raise except_orm('AccessError',
+                            'You cannot write in this document!')
+                elif mode == 'create':
+                    raise except_orm('AccessError',
+                            'You cannot create this kind of document!')
+                elif mode == 'unlink':
+                    raise except_orm('AccessError',
+                            'You cannot delete this document!')
+                raise except_orm('AccessError',
+                        'You do not have access to this document!')
+            else:
+                return False
+        return True
+
+    check = Cache()(check)
+
+    # Methods to clear the cache of the check method.
+    def write(self, cursor, user, ids, vals, context=None):
+        res = super(ModelAccess, self).write(cursor, user, ids, vals,
+                context=context)
+        self.check()
+        return res
+
+    def create(self, cursor, user, vals, context=None):
+        res = super(ModelAccess, self).create(cursor, user, vals,
+                context=context)
+        self.check()
+        return res
+
+    def unlink(self, cursor, user, ids, context=None):
+        res = super(ModelAccess, self).unlink(cursor, user, ids,
+                context=context)
+        self.check()
+        return res
+
+ModelAccess()
+
+
+class ModelData(OSV):
+    "Model data"
+    _name = 'ir.model.data'
+    _description = __doc__
+    _columns = {
+        'name': fields.char('XML Identifier', required=True, size=64),
+        'model': fields.char('Model', required=True, size=64),
+        'module': fields.char('Module', required=True, size=64),
+        'res_id': fields.integer('Resource ID'),
+        'noupdate': fields.boolean('Non Updatable'),
+        'date_update': fields.datetime('Update Date'),
+        'date_init': fields.datetime('Init Date')
+    }
+    _defaults = {
+        'date_init': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
+        'date_update': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
+        'noupdate': lambda *a: False
+    }
+
+    def __init__(self, pool):
+        OSV.__init__(self, pool)
+        self.loads = {}
+        self.doinit = True
+        self.unlink_mark = {}
+
+    def _get_id(self, cursor, user, module, xml_id):
+        ids = self.search(cursor, user, [
+            ('module', '=', module),
+            ('name', '=', xml_id),
+            ])
+        assert len(ids) == 1, '%d reference(s) to %s. ' \
+                'There should be only one!' % (len(ids), xml_id)
+        return ids[0]
+
+    _get_id = Cache()(_get_id)
+
+    def _update_dummy(self, cursor, user, model, module, xml_id=False):
+        if not xml_id:
+            return False
+        try:
+            obj_id = self.read(cursor, user, self._get_id(cursor, user, module,
+                xml_id), ['res_id'])['res_id']
+            self.loads[(module, xml_id)] = (model, obj_id)
+        except:
+            obj_id = False
+        return obj_id
+
+    def _update(self, cursor, user, model, module, values, xml_id='',
+            noupdate=False, mode='init', res_id=False):
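+        # Create or update the record identified by (module, xml_id),
+        # keeping the ir_model_data cross-reference in sync and removing
+        # stale references whose target record no longer exists.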
+        model_obj = self.pool.get(model)
+        if xml_id and ('.' in xml_id):
+            assert len(xml_id.split('.')) == 2, '"%s" contains too many dots. '\
+                    'XML ids should not contain dots! ' \
+                    'Dots are used to refer to data of other modules, ' \
+                    'as in module.reference_id' % (xml_id)
+            module, xml_id = xml_id.split('.')
+        if (not xml_id) and (not self.doinit):
+            return False
+        action_id = False
+        if xml_id:
+            cursor.execute('SELECT id, res_id FROM ir_model_data ' \
+                    'WHERE module = %s AND name = %s', (module,xml_id))
+            results = cursor.fetchall()
+            for action_id2, res_id2 in results:
+                cursor.execute('SELECT id ' \
+                        'FROM ' + self.pool.get(model)._table + ' ' \
+                        'WHERE id = %d', (res_id2,))
+                result3 = cursor.fetchone()
+                if not result3:
+                    cursor.execute('DELETE FROM ir_model_data ' \
+                            'WHERE id = %d', (action_id2,))
+                else:
+                    res_id, action_id = res_id2, action_id2
+
+        if action_id and res_id:
+            model_obj.write(cursor, user, [res_id], values)
+            self.write(cursor, user, [action_id], {
+                'date_update': time.strftime('%Y-%m-%d %H:%M:%S'),
+                })
+        elif res_id:
+            model_obj.write(cursor, user, [res_id], values)
+            if xml_id:
+                self.create(cursor, user, {
+                    'name': xml_id,
+                    'model': model,
+                    'module':module,
+                    'res_id':res_id,
+                    'noupdate': noupdate,
+                    })
+                if model_obj._inherits:
+                    for table in model_obj._inherits:
+                        inherit_id = model_obj.browse(cursor, user,
+                                res_id)[model_obj._inherits[table]]
+                        self.create(cursor, user, {
+                            'name': xml_id + '_' + table.replace('.', '_'),
+                            'model': table,
+                            'module': module,
+                            'res_id': inherit_id,
+                            'noupdate': noupdate,
+                            })
+        else:
+            if mode == 'init' or (mode == 'update' and xml_id):
+                res_id = model_obj.create(cursor, user, values)
+                if xml_id:
+                    self.create(cursor, user, {
+                        'name': xml_id,
+                        'model': model,
+                        'module': module,
+                        'res_id': res_id,
+                        'noupdate': noupdate
+                        })
+                    if model_obj._inherits:
+                        for table in model_obj._inherits:
+                            inherit_id = model_obj.browse(cursor, user,
+                                    res_id)[model_obj._inherits[table]]
+                            self.create(cursor, user, {
+                                'name': xml_id + '_' + table.replace('.', '_'),
+                                'model': table,
+                                'module': module,
+                                'res_id': inherit_id,
+                                'noupdate': noupdate,
+                                })
+        if xml_id:
+            if res_id:
+                self.loads[(module, xml_id)] = (model, res_id)
+        return res_id
+
+    def _unlink(self, cursor, user, model, ids, direct=False):
+        #self.pool.get(model).unlink(cursor, user, ids)
+        for obj_id in ids:
+            self.unlink_mark[(model, obj_id)]=False
+            cursor.execute('DELETE FROM ir_model_data ' \
+                    'WHERE res_id = %d AND model = %s', (obj_id, model))
+        return True
+
+    def ir_set(self, cursor, user, key, key2, name, models, value,
+            replace=True, isobject=False, meta=None, xml_id=False):
+        obj = self.pool.get('ir.values')
+        if type(models[0])==type([]) or type(models[0])==type(()):
+            model, res_id = models[0]
+        else:
+            res_id = None
+            model = models[0]
+
+        if res_id:
+            where = ' AND res_id = %d' % (res_id,)
+        else:
+            where = ' AND (res_id IS NULL)'
+
+        if key2:
+            where += ' AND key2 = \'%s\'' % (key2,)
+        else:
+            where += ' AND (key2 IS NULL)'
+
+        cursor.execute('SELECT * FROM ir_values ' \
+                'WHERE model = %s AND key = %s AND name = %s' + where,
+                (model, key, name))
+        res = cursor.fetchone()
+        if not res:
+            res = obj.set(cursor, user, key, key2, name, models, value,
+                    replace, isobject, meta)
+        elif xml_id:
+            cursor.execute('UPDATE ir_values SET value = %s ' \
+                    'WHERE model = %s AND key = %s AND name = %s' + where,
+                    (value, model, key, name))
+        return True
+
+    def _process_end(self, cursor, user, modules):
+        if not modules:
+            return True
+        module_str = ["'%s'" % m for m in modules]
+        cursor.execute('SELECT id, name, model, res_id, module ' \
+                'FROM ir_model_data ' \
+                'WHERE module IN (' + ','.join(module_str) + ') ' \
+                    'AND NOT noupdate')
+        wkf_todo = []
+        for (obj_id, name, model, res_id, module) in cursor.fetchall():
+            if (module, name) not in self.loads:
+                self.unlink_mark[(model, res_id)] = obj_id
+                if model == 'workflow.activity':
+                    cursor.execute('SELECT res_type, res_id ' \
+                            'FROM wkf_instance ' \
+                            'WHERE id IN (' \
+                                'SELECT inst_id FROM wkf_workitem ' \
+                                'WHERE act_id = %d)', (res_id,))
+                    wkf_todo.extend(cursor.fetchall())
+                    cursor.execute("UPDATE wkf_transition " \
+                            "SET condition = 'True', role_id = NULL, " \
+                                "signal = NULL, act_to = act_from, " \
+                                "act_from = %d " \
+                            "WHERE act_to = %d", (res_id, res_id))
+                    cursor.execute("DELETE FROM wkf_transition " \
+                            "WHERE act_to = %d", (res_id,))
+
+        for model, obj_id in wkf_todo:
+            wf_service = LocalService("workflow")
+            wf_service.trg_write(user, model, obj_id, cursor)
+
+        for (model, obj_id) in self.unlink_mark.keys():
+            if self.pool.get(model):
+                logger = Logger()
+                logger.notify_channel('init', LOG_INFO,
+                        'Deleting %s@%s' % (obj_id, model))
+                try:
+                    self.pool.get(model).unlink(cursor, user, [obj_id])
+                    if self.unlink_mark[(model, obj_id)]:
+                        self.unlink(cursor, user,
+                                [self.unlink_mark[(model, obj_id)]])
+                        cursor.execute('DELETE FROM ir_values WHERE value=%s',
+                                (model + ',' + str(obj_id),))
+                except:
+                    logger.notify_channel('init', LOG_ERROR,
+                            'Could not delete id %d of model %s\t' \
+                                    'Some relation probably still ' \
+                                    'points to this resource\t' \
+                                    'You should fix this manually ' \
+                                    'and restart with --update=module' % \
+                                    (obj_id, model))
+        return True
+
+ModelData()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/ir/rule.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,235 @@
+"Rule"
+from trytond.osv import fields, OSV
+from trytond.tools import Cache
+import time
+
+
+class RuleGroup(OSV):
+    "Rule group"
+    _name = 'ir.rule.group'
+    _description = __doc__
+    _columns = {
+        'name': fields.char('Name', size=128, select=1),
+        'model_id': fields.many2one('ir.model', 'Model', select=1,
+            required=True),
+        'global': fields.boolean('Global', select=1,
+            help="Make the rule global \n" \
+                    "or it needs to be put on a group or user"),
+        'rules': fields.one2many('ir.rule', 'rule_group', 'Tests',
+            help="The rule is satisfied if at least one test is True"),
+        'groups': fields.many2many('res.groups', 'group_rule_group_rel',
+            'rule_group_id', 'group_id', 'Groups'),
+        'users': fields.many2many('res.users', 'user_rule_group_rel',
+            'rule_group_id', 'user_id', 'Users'),
+    }
+    _order = 'model_id, global DESC'
+    _defaults = {
+        'global': lambda *a: True,
+    }
+
+    def unlink(self, cursor, user, ids, context=None):
+        res = super(RuleGroup, self).unlink(cursor, user, ids,
+                context=context)
+        # Restart the cache on the domain_get method of ir.rule
+        self.pool.get('ir.rule').domain_get()
+        return res
+
+    def create(self, cursor, user, vals, context=None):
+        res = super(RuleGroup, self).create(cursor, user, vals,
+                context=context)
+        # Restart the cache on the domain_get method of ir.rule
+        self.pool.get('ir.rule').domain_get()
+        return res
+
+    def write(self, cursor, user, ids, vals, context=None):
+        res = super(RuleGroup, self).write(cursor, user, ids, vals,
+                context=context)
+        # Restart the cache on the domain_get method of ir.rule
+        self.pool.get('ir.rule').domain_get()
+        return res
+
+RuleGroup()
+
+
+class Rule(OSV):
+    "Rule"
+    _name = 'ir.rule'
+    _rec_name = 'field_id'
+    _description = __doc__
+
+    def _operand(self, cursor, user, context):
+
+        def get(obj_name, level=3, recur=None, root_tech='', root=''):
+            res = []
+            if not recur:
+                recur = []
+            obj_fields = self.pool.get(obj_name).fields_get(cursor, user)
+            key = obj_fields.keys()
+            key.sort()
+            for k in key:
+
+                if obj_fields[k]['type'] in ('many2one',):
+                    res.append((root_tech + '.' + k + '.id',
+                        root + '/' + obj_fields[k]['string']))
+
+                elif obj_fields[k]['type'] in ('many2many', 'one2many'):
+                    res.append(('\',\'.join(map(lambda x: str(x.id), ' + \
+                            root_tech + '.' + k + '))',
+                        root + '/' + obj_fields[k]['string']))
+
+                else:
+                    res.append((root_tech + '.' + k,
+                        root + '/' + obj_fields[k]['string']))
+
+                if (obj_fields[k]['type'] in recur) and (level>0):
+                    res.extend(get(obj_fields[k]['relation'], level-1,
+                        recur, root_tech + '.' + k, root + '/' + \
+                                obj_fields[k]['string']))
+
+            return res
+
+        res = [("False", "False"), ("True", "True"), ("user.id", "User")]
+        res += get('res.users', level=1,
+                recur=['many2one'], root_tech='user', root='User')
+        return res
+
+    _columns = {
+        'field_id': fields.many2one('ir.model.fields', 'Field',
+            domain="[('model_id','=', parent.model_id)]", select=1,
+            required=True),
+        'operator':fields.selection([
+            ('=', '='),
+            ('<>', '<>'),
+            ('<=', '<='),
+            ('>=', '>='),
+            ('in', 'in'),
+            ('child_of', 'child_of'),
+            ], 'Operator', required=True),
+        'operand':fields.selection(_operand,'Operand', size=64, required=True),
+        'rule_group': fields.many2one('ir.rule.group', 'Group', select=2,
+            required=True, ondelete="cascade")
+    }
+
+    def domain_get(self, cursor, user, model_name):
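+        # Build the SQL restriction for model_name from the record rules:
+        # the clauses of non-global rule groups are OR-ed together and the
+        # global ones are AND-ed on top.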
+        # the root user is not restricted by record rules
+        if user == 1:
+            return '', []
+
+        cursor.execute("SELECT r.id FROM ir_rule r " \
+                "JOIN (ir_rule_group g " \
+                    "JOIN ir_model m ON (g.model_id = m.id)) " \
+                    "ON (g.id = r.rule_group) " \
+                "WHERE m.model = %s "
+                    "AND (g.id IN (" \
+                            "SELECT rule_group_id FROM user_rule_group_rel " \
+                                "WHERE user_id = %d " \
+                            "UNION SELECT rule_group_id " \
+                                "FROM group_rule_group_rel g_rel " \
+                                "JOIN res_groups_users_rel u_rel " \
+                                    "ON (g_rel.group_id = u_rel.gid) " \
+                                "WHERE u_rel.user = %d) "
+                    "OR g.global)", (model_name, user, user))
+        ids = [x[0] for x in cursor.fetchall()]
+        if not ids:
+            return '', []
+        obj = self.pool.get(model_name)
+        clause = {}
+        clause_global = {}
+        # Use root user to prevent recursion
+        for rule in self.browse(cursor, 1, ids):
+            if rule.operator in ('in', 'child_of'):
+                dom = eval("[('%s', '%s', [%s])]" % \
+                        (rule.field_id.name, rule.operator, rule.operand),
+                        {'user': self.pool.get('res.users').browse(cursor, 1,
+                            user), 'time': time})
+            else:
+                dom = eval("[('%s', '%s', %s)]" % \
+                        (rule.field_id.name, rule.operator, rule.operand),
+                        {'user': self.pool.get('res.users').browse(cursor, 1,
+                            user), 'time': time})
+
+            if rule.rule_group['global']:
+                clause_global.setdefault(rule.rule_group.id, [])
+                clause_global[rule.rule_group.id].append(
+                        obj._where_calc(cursor, user, dom, active_test=False))
+            else:
+                clause.setdefault(rule.rule_group.id, [])
+                clause[rule.rule_group.id].append(
+                        obj._where_calc(cursor, user, dom, active_test=False))
+
+        def _query(clauses, test):
+            query = ''
+            val = []
+            for groups in clauses.values():
+                if not groups:
+                    continue
+                if len(query):
+                    query += ' '+test+' '
+                query += '('
+                first = True
+                for group in groups:
+                    if not first:
+                        query += ' OR '
+                    first = False
+                    query += '('
+                    first2 = True
+                    for clause in group[0]:
+                        if not first2:
+                            query += ' AND '
+                        first2 = False
+                        query += clause
+                    query += ')'
+                    val += group[1]
+                query += ')'
+            return query, val
+
+        query = ''
+        val = []
+
+        # A rule_group without any rule grants full access, so only apply
+        # the group clauses when the user belongs to no such group
+        cursor.execute("""SELECT g.id FROM
+            ir_rule_group g
+                JOIN ir_model m ON (g.model_id = m.id)
+            WHERE m.model = %s
+                AND (g.id NOT IN (SELECT rule_group FROM ir_rule))
+                AND (g.id IN (SELECT rule_group_id FROM user_rule_group_rel
+                    WHERE user_id = %d
+                    UNION SELECT rule_group_id FROM group_rule_group_rel g_rel
+                        JOIN res_groups_users_rel u_rel
+                            ON g_rel.group_id = u_rel.gid
+                        WHERE u_rel.user = %d))""", (model_name, user, user))
+        if not cursor.fetchall():
+            query, val = _query(clause, 'OR')
+
+        query_global, val_global = _query(clause_global, 'AND')
+        if query_global:
+            if query:
+                query = '('+query+') AND '+query_global
+                val.extend(val_global)
+            else:
+                query = query_global
+                val = val_global
+
+        return query, val
+    domain_get = Cache()(domain_get)
+
+    def unlink(self, cursor, user, ids, context=None):
+        res = super(Rule, self).unlink(cursor, user, ids, context=context)
+        # Restart the cache on the domain_get method of ir.rule
+        self.domain_get()
+        return res
+
+    def create(self, cursor, user, vals, context=None):
+        res = super(Rule, self).create(cursor, user, vals, context=context)
+        # Restart the cache on the domain_get method of ir.rule
+        self.domain_get()
+        return res
+
+    def write(self, cursor, user, ids, vals, context=None):
+        res = super(Rule, self).write(cursor, user, ids, vals,
+                context=context)
+        # Restart the cache on the domain_get method
+        self.domain_get()
+        return res
+
+Rule()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/ir/sequence.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,76 @@
+import time
+from trytond.osv import fields, OSV
+
+
+class SequenceType(OSV):
+    "Sequence type"
+    _name = 'ir.sequence.type'
+    _description = __doc__
+    _columns = {
+        'name': fields.char('Sequence Name',size=64, required=True),
+        'code': fields.char('Sequence Code',size=32, required=True),
+    }
+
+SequenceType()
+
+
+class Sequence(OSV):
+    "Sequence"
+    _name = 'ir.sequence'
+    _description = __doc__
+
+    @staticmethod
+    def _code_get(obj, cursor, user, context=None):
+        cursor.execute('SELECT code, name FROM ir_sequence_type')
+        return cursor.fetchall()
+
+    _columns = {
+        'name': fields.char('Sequence Name',size=64, required=True),
+        'code': fields.selection(_code_get, 'Sequence Code',size=64,
+            required=True),
+        'active': fields.boolean('Active'),
+        'prefix': fields.char('Prefix',size=64),
+        'suffix': fields.char('Suffix',size=64),
+        'number_next': fields.integer('Next Number', required=True),
+        'number_increment': fields.integer('Increment Number', required=True),
+        'padding' : fields.integer('Number padding', required=True),
+    }
+    _defaults = {
+        'active': lambda *a: True,
+        'number_increment': lambda *a: 1,
+        'number_next': lambda *a: 1,
+        'padding' : lambda *a : 0,
+    }
+
+    @staticmethod
+    def _process(string):
+        return (string or '') % {
+                'year':time.strftime('%Y'),
+                'month': time.strftime('%m'),
+                'day':time.strftime('%d'),
+                }
+
+    def get_id(self, cursor, user, sequence_id, test='id=%d'):
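+        # Lock the whole table so that two transactions cannot draw the
+        # same number, then read and increment number_next in one go.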
+        cursor.execute('lock table ir_sequence')
+        cursor.execute('SELECT id, number_next, number_increment, prefix, ' \
+                    'suffix, padding ' \
+                'FROM ir_sequence ' \
+                'WHERE ' + test + ' AND active = True', (sequence_id,))
+        res = cursor.dictfetchone()
+        if res:
+            cursor.execute('UPDATE ir_sequence ' \
+                    'SET number_next = number_next + number_increment ' \
+                    'WHERE id = %d AND active = True', (res['id'],))
+            if res['number_next']:
+                return self._process(res['prefix']) + \
+                        '%%0%sd' % res['padding'] % res['number_next'] + \
+                        self._process(res['suffix'])
+            else:
+                return self._process(res['prefix']) + \
+                        self._process(res['suffix'])
+        return False
+
+    def get(self, cursor, user, code):
+        return self.get_id(cursor, user, code, test='code=%s')
+
+Sequence()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/ir/translation.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,137 @@
+"Translation"
+from trytond.osv import fields, OSV, Cacheable
+from trytond import tools
+
+TRANSLATION_TYPE = [
+    ('field', 'Field'),
+    ('model', 'Model'),
+    ('rml', 'RML'),
+    ('selection', 'Selection'),
+    ('view', 'View'),
+    ('wizard_button', 'Wizard Button'),
+    ('wizard_field', 'Wizard Field'),
+    ('wizard_view', 'Wizard View'),
+    ('xsl', 'XSL'),
+    ('help', 'Help'),
+]
+
+class Translation(OSV, Cacheable):
+    "Translation"
+    _name = "ir.translation"
+    _log_access = False
+    _description = __doc__
+
+    def _get_language(self, cursor, user, context):
+        lang_obj = self.pool.get('res.lang')
+        lang_ids = lang_obj.search(cursor, user, [('translatable', '=', True)],
+                context=context)
+        langs = lang_obj.browse(cursor, user, lang_ids, context=context)
+        res = [(lang.code, lang.name) for lang in langs]
+        for lang_dict in tools.scan_languages():
+            if lang_dict not in res:
+                res.append(lang_dict)
+        return res
+
+    _columns = {
+        'name': fields.char('Field Name', size=128, required=True),
+        'res_id': fields.integer('Resource ID'),
+        'lang': fields.selection(_get_language, string='Language', size=5),
+        'type': fields.selection(TRANSLATION_TYPE, string='Type', size=16),
+        'src': fields.text('Source'),
+        'value': fields.text('Translation Value'),
+    }
+    _sql = """
+        CREATE INDEX ir_translation_ltn ON ir_translation (lang, type, name);
+        CREATE INDEX ir_translation_res_id ON ir_translation (res_id);
+    """
+
+    def _get_ids(self, cursor, name, ttype, lang, ids):
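+        # Serve translations from the Cacheable store when possible, fetch
+        # the misses in a single query and cache negative results as False.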
+        translations, to_fetch = {}, []
+        for obj_id in ids:
+            trans = self.get((lang, ttype, name, obj_id))
+            if trans is not None:
+                translations[obj_id] = trans
+            else:
+                to_fetch.append(obj_id)
+        if to_fetch:
+            cursor.execute('SELECT res_id, value ' \
+                    'FROM ir_translation ' \
+                    'WHERE lang = %s ' \
+                        'AND type = %s ' \
+                        'AND name = %s ' \
+                        'AND res_id in (' + \
+                            ','.join([str(x) for x in to_fetch]) + ')',
+                    (lang, ttype, name))
+            for res_id, value in cursor.fetchall():
+                self.add((lang, ttype, name, res_id), value)
+                translations[res_id] = value
+        for res_id in ids:
+            if res_id not in translations:
+                self.add((lang, ttype, name, res_id), False)
+                translations[res_id] = False
+        return translations
+
+    def _set_ids(self, cursor, user, name, ttype, lang, ids, value):
+        cursor.execute('DELETE FROM ir_translation ' \
+                'WHERE lang = %s ' \
+                    'AND type = %s ' \
+                    'AND name = %s ' \
+                    'AND res_id IN (' + ','.join([str(x) for x in ids]) + ')',
+                (lang, ttype, name))
+        for obj_id in ids:
+            self.create(cursor, user, {
+                'lang': lang,
+                'type': ttype,
+                'name': name,
+                'res_id': obj_id,
+                'value': value,
+                })
+        return len(ids)
+
+    def _get_source(self, cursor, name, ttype, lang, source=None):
+        trans = self.get((lang, ttype, name, source))
+        if trans is not None:
+            return trans
+
+        if source:
+            source = source.strip().replace('\n',' ')
+            if isinstance(source, unicode):
+                source = source.encode('utf8')
+            cursor.execute('SELECT value ' \
+                    'FROM ir_translation ' \
+                    'WHERE lang = %s ' \
+                        'AND type = %s ' \
+                        'AND name = %s ' \
+                        'AND src = %s',
+                    (lang, ttype, str(name), source))
+        else:
+            cursor.execute('SELECT value ' \
+                    'FROM ir_translation ' \
+                    'WHERE lang = %s ' \
+                        'AND type = %s ' \
+                        'AND name = %s',
+                    (lang, ttype, str(name)))
+        res = cursor.fetchone()
+        if res:
+            self.add((lang, ttype, name, source), res[0])
+            return res[0]
+        else:
+            self.add((lang, ttype, name, source), False)
+            return False
+
+    def unlink(self, cursor, user, ids, context=None):
+        self.clear()
+        return super(Translation, self).unlink(cursor, user, ids,
+                context=context)
+
+    def create(self, cursor, user, vals, context=None):
+        self.clear()
+        return super(Translation, self).create(cursor, user, vals,
+                context=context)
+
+    def write(self, cursor, user, ids, vals, context=None):
+        self.clear()
+        return super(Translation, self).write(cursor, user, ids, vals,
+                context=context)
+
+Translation()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/ir/ui/__init__.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,2 @@
+from menu import *
+from view import *
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/ir/ui/menu.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,260 @@
+"UI menu"
+from trytond.osv import fields, OSV
+
+def one_in(i, j):
+    """Check the presence of an element of setA in setB
+    """
+    for k in i:
+        if k in j:
+            return True
+    return False
+
+ICONS = [(x, x) for x in [
+    'STOCK_ABOUT',
+    'STOCK_ADD',
+    'STOCK_APPLY',
+    'STOCK_BOLD',
+    'STOCK_CANCEL',
+    'STOCK_CDROM',
+    'STOCK_CLEAR',
+    'STOCK_CLOSE',
+    'STOCK_COLOR_PICKER',
+    'STOCK_CONNECT',
+    'STOCK_CONVERT',
+    'STOCK_COPY',
+    'STOCK_CUT',
+    'STOCK_DELETE',
+    'STOCK_DIALOG_AUTHENTICATION',
+    'STOCK_DIALOG_ERROR',
+    'STOCK_DIALOG_INFO',
+    'STOCK_DIALOG_QUESTION',
+    'STOCK_DIALOG_WARNING',
+    'STOCK_DIRECTORY',
+    'STOCK_DISCONNECT',
+    'STOCK_DND',
+    'STOCK_DND_MULTIPLE',
+    'STOCK_EDIT',
+    'STOCK_EXECUTE',
+    'STOCK_FILE',
+    'STOCK_FIND',
+    'STOCK_FIND_AND_REPLACE',
+    'STOCK_FLOPPY',
+    'STOCK_GOTO_BOTTOM',
+    'STOCK_GOTO_FIRST',
+    'STOCK_GOTO_LAST',
+    'STOCK_GOTO_TOP',
+    'STOCK_GO_BACK',
+    'STOCK_GO_DOWN',
+    'STOCK_GO_FORWARD',
+    'STOCK_GO_UP',
+    'STOCK_HARDDISK',
+    'STOCK_HELP',
+    'STOCK_HOME',
+    'STOCK_INDENT',
+    'STOCK_INDEX',
+    'STOCK_ITALIC',
+    'STOCK_JUMP_TO',
+    'STOCK_JUSTIFY_CENTER',
+    'STOCK_JUSTIFY_FILL',
+    'STOCK_JUSTIFY_LEFT',
+    'STOCK_JUSTIFY_RIGHT',
+    'STOCK_MEDIA_FORWARD',
+    'STOCK_MEDIA_NEXT',
+    'STOCK_MEDIA_PAUSE',
+    'STOCK_MEDIA_PLAY',
+    'STOCK_MEDIA_PREVIOUS',
+    'STOCK_MEDIA_RECORD',
+    'STOCK_MEDIA_REWIND',
+    'STOCK_MEDIA_STOP',
+    'STOCK_MISSING_IMAGE',
+    'STOCK_NETWORK',
+    'STOCK_NEW',
+    'STOCK_NO',
+    'STOCK_OK',
+    'STOCK_OPEN',
+    'STOCK_PASTE',
+    'STOCK_PREFERENCES',
+    'STOCK_PRINT',
+    'STOCK_PRINT_PREVIEW',
+    'STOCK_PROPERTIES',
+    'STOCK_QUIT',
+    'STOCK_REDO',
+    'STOCK_REFRESH',
+    'STOCK_REMOVE',
+    'STOCK_REVERT_TO_SAVED',
+    'STOCK_SAVE',
+    'STOCK_SAVE_AS',
+    'STOCK_SELECT_COLOR',
+    'STOCK_SELECT_FONT',
+    'STOCK_SORT_ASCENDING',
+    'STOCK_SORT_DESCENDING',
+    'STOCK_SPELL_CHECK',
+    'STOCK_STOP',
+    'STOCK_STRIKETHROUGH',
+    'STOCK_UNDELETE',
+    'STOCK_UNDERLINE',
+    'STOCK_UNDO',
+    'STOCK_UNINDENT',
+    'STOCK_YES',
+    'STOCK_ZOOM_100',
+    'STOCK_ZOOM_FIT',
+    'STOCK_ZOOM_IN',
+    'STOCK_ZOOM_OUT',
+    'terp-account',
+    'terp-crm',
+    'terp-mrp',
+    'terp-product',
+    'terp-purchase',
+    'terp-sale',
+    'terp-tools',
+    'terp-administration',
+    'terp-hr',
+    'terp-partner',
+    'terp-project',
+    'terp-report',
+    'terp-stock',
+    'terp-calendar',
+    'terp-graph',
+]]
+
+
+class Many2ManyUniq(fields.Many2Many):
+
+    def set(self, cursor, obj, obj_id, name, values, user=None, context=None):
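+        # Drop (4, id) "add link" commands whose row already exists in the
+        # relation table, so the same link is never inserted twice.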
+        if not values:
+            return
+        val = values[:]
+        for act in values:
+            if act[0] == 4:
+                cursor.execute('SELECT * FROM ' + self._rel + ' ' \
+                        'WHERE ' + self._id1 + ' = %d ' \
+                            'AND ' + self._id2 + ' = %d',
+                        (obj_id, act[1]))
+                if cursor.fetchall():
+                    val.remove(act)
+        return super(Many2ManyUniq, self).set(cursor, obj, obj_id, name, val,
+                user=user, context=context)
+
+
+class UIMenu(OSV):
+    "UI menu"
+    _name = 'ir.ui.menu'
+    _description = __doc__
+
+    def search(self, cursor, user, args, offset=0, limit=2000, order=None,
+            context=None, count=False):
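+        # Standard search, then keep only the menus that either have no
+        # group restriction or share at least one group with the user.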
+        res_user_obj = self.pool.get('res.users')
+        if context is None:
+            context = {}
+        ids = super(UIMenu, self).search(cursor, user, args, offset, limit,
+                order, context=context)
+        user_groups = res_user_obj.read(cursor, user, [user])[0]['groups_id']
+        result = []
+        for menu in self.browse(cursor, user, ids):
+            if not len(menu.groups_id):
+                result.append(menu.id)
+                continue
+            for group in menu.groups_id:
+                if group.id in user_groups:
+                    result.append(menu.id)
+                    break
+        if count:
+            return len(result)
+        return result
+
+    def _get_full_name(self, cursor, user, ids, name, args, context):
+        res = {}
+        for menu in self.browse(cursor, user, ids):
+            res[menu.id] = self._get_one_full_name(menu)
+        return res
+
+    def _get_one_full_name(self, menu, level=6):
+        if level <= 0:
+            return '...'
+        if menu.parent_id:
+            parent_path = self._get_one_full_name(menu.parent_id, level-1) + "/"
+        else:
+            parent_path = ''
+        return parent_path + menu.name
+
+    def copy(self, cursor, user, obj_id, default=None, context=None):
+        ir_values_obj = self.pool.get('ir.values')
+        res = super(UIMenu, self).copy(cursor, user, obj_id, context=context)
+        ids = ir_values_obj.search(cursor, user, [
+            ('model', '=', 'ir.ui.menu'),
+            ('res_id', '=', obj_id),
+            ])
+        for ir_value in ir_values_obj.browse(cursor, user, ids):
+            ir_values_obj.copy(cursor, user, ir_value.id,
+                    default={'res_id': res}, context=context)
+        return res
+
+    def _action(self, cursor, user, ids, name, arg, context=None):
+        res = {}
+        values_obj = self.pool.get('ir.values')
+        value_ids = values_obj.search(cursor, user, [
+            ('model', '=', self._name), ('key', '=', 'action'),
+            ('key2', '=', 'tree_but_open'), ('res_id', 'in', ids)],
+            context=context)
+        values_action = {}
+        for value in values_obj.browse(cursor, user, value_ids,
+                context=context):
+            values_action[value.res_id] = value.value
+        for menu_id in ids:
+            res[menu_id] = values_action.get(menu_id, False)
+        return res
+
+    def _action_inv(self, cursor, user, menu_id, name, value, arg,
+            context=None):
+        if context is None:
+            context = {}
+        ctx = context.copy()
+        if 'read_delta' in ctx:
+            del ctx['read_delta']
+        values_obj = self.pool.get('ir.values')
+        values_ids = values_obj.search(cursor, user, [
+            ('model', '=', self._name), ('key', '=', 'action'),
+            ('key2', '=', 'tree_but_open'), ('res_id', '=', menu_id)],
+            context=context)
+        if values_ids:
+            values_obj.write(cursor, user, values_ids[0], {'value': value},
+                    context=ctx)
+        else:
+            values_obj.create(cursor, user, {
+                'name': 'Menuitem',
+                'model': self._name,
+                'value': value,
+                'object': True,
+                'key': 'action',
+                'key2': 'tree_but_open',
+                'res_id': menu_id,
+                }, context=ctx)
+
+    _columns = {
+        'name': fields.char('Menu', size=64, required=True, translate=True),
+        'sequence': fields.integer('Sequence'),
+        'child_id' : fields.one2many('ir.ui.menu', 'parent_id','Child ids'),
+        'parent_id': fields.many2one('ir.ui.menu', 'Parent Menu', select=True),
+        'groups_id': Many2ManyUniq('res.groups', 'ir_ui_menu_group_rel',
+            'menu_id', 'gid', 'Groups'),
+        'complete_name': fields.function(_get_full_name, method=True,
+            string='Complete Name', type='char', size=128),
+        'icon': fields.selection(ICONS, 'Icon', size=64),
+        'action': fields.function(_action, fnct_inv=_action_inv,
+            method=True, type='reference', string='Action',
+            selection=[
+                ('ir.actions.report.custom', 'ir.actions.report.custom'),
+                ('ir.actions.report.xml', 'ir.actions.report.xml'),
+                ('ir.actions.act_window', 'ir.actions.act_window'),
+                ('ir.actions.wizard', 'ir.actions.wizard'),
+                ]),
+    }
+    _defaults = {
+        'icon' : lambda *a: 'STOCK_OPEN',
+        'sequence' : lambda *a: 10,
+    }
+    _order = "sequence, id"
+
+UIMenu()
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/ir/ui/view.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,75 @@
+"View"
+from trytond.osv import fields, OSV
+from xml import dom
+
+
+class View(OSV):
+    "View"
+    _name = 'ir.ui.view'
+    _description = __doc__
+    _columns = {
+        'name': fields.char('View Name',size=64,  required=True),
+        'model': fields.char('Model', size=64, required=True),
+        'priority': fields.integer('Priority', required=True),
+        'type': fields.selection((
+            ('tree','Tree'),
+            ('form','Form'),
+            ('graph', 'Graph'),
+            ('calendar', 'Calendar')), 'View Type', required=True),
+        'arch': fields.text('View Architecture', required=True),
+        'inherit_id': fields.many2one('ir.ui.view', 'Inherited View'),
+        'field_parent': fields.char('Children Field', size=64)
+    }
+    _defaults = {
+        'arch': lambda *a: '<?xml version="1.0"?>\n' \
+                '<tree title="Unknwown">\n\t<field name="name"/>\n</tree>',
+        'priority': lambda *a: 16,
+    }
+    _order = "priority"
+
+    @staticmethod
+    def _check_xml(obj, cursor, user, ids):
+        "Check XML"
+        cursor.execute('SELECT arch FROM ir_ui_view ' \
+                'WHERE id IN (' + ','.join([str(x) for x in ids]) + ')')
+        for row in cursor.fetchall():
+            try:
+                dom.minidom.parseString(row[0])
+            except:
+                return False
+        return True
+
+    _constraints = [
+        (_check_xml, 'Invalid XML for View Architecture!', ['arch'])
+    ]
+
+View()
+
+
+class ViewShortcut(OSV):
+    "View shortcut"
+    _name = 'ir.ui.view_sc'
+    _description = __doc__
+    _columns = {
+        'name': fields.char('Shortcut Name', size=64, required=True),
+        'res_id': fields.integer('Resource Ref.', required=True),
+        'sequence': fields.integer('Sequence'),
+        'user_id': fields.many2one('res.users', 'User Ref.', required=True,
+            ondelete='cascade'),
+        'resource': fields.char('Resource Name', size=64, required=True)
+    }
+
+    def get_sc(self, cursor, user, user_id, model='ir.ui.menu', context=None):
+        "Provide user's shortcuts"
+        ids = self.search(cursor, user, [
+            ('user_id','=',user_id),
+            ('resource','=',model),
+            ], context=context)
+        return self.read(cursor, user, ids, ['res_id', 'name'], context=context)
+
+    _order = 'sequence'
+    _defaults = {
+        'resource': lambda *a: 'ir.ui.menu',
+    }
+
+ViewShortcut()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/ir/values.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,226 @@
+"Values"
+from trytond.osv import OSV, fields
+import pickle
+
+class Values(OSV):
+    "Values"
+    _name = 'ir.values'
+    _description = __doc__
+
+    def _value_unpickle(self, cursor, user, ids, name, arg, context=None):
+        res = {}
+        for report in self.browse(cursor, user, ids, context=context):
+            value = report[name[:-9]]
+            if not report.object and value:
+                try:
+                    value = str(pickle.loads(value))
+                except:
+                    pass
+            res[report.id] = value
+        return res
+
+    def _value_pickle(self, cursor, user, obj_id, name, value, arg,
+            context=None):
+        if context is None:
+            context = {}
+        ctx = context.copy()
+        if 'read_delta' in ctx:
+            del ctx['read_delta']
+        values = self.browse(cursor, user, obj_id, context=context)
+        if not values.object:
+            value = pickle.dumps(eval(value))
+        self.write(cursor, user, obj_id, {name[:-9]: value}, context=ctx)
+
+    _columns = {
+        'name': fields.char('Name', size=128),
+        'model': fields.char('Model', size=128),
+        'value': fields.text('Value'),
+        'value_unpickle': fields.function(_value_unpickle,
+            fnct_inv=_value_pickle, method=True, type='text', string='Value'),
+        'object': fields.boolean('Is Object'),
+        'key': fields.char('Type', size=128),
+        'key2': fields.char('Value', size=256),
+        'meta': fields.text('Meta Datas'),
+        'meta_unpickle': fields.function(_value_unpickle,
+            fnct_inv=_value_pickle, method=True, type='text',
+            string='Meta Datas'),
+        'res_id': fields.integer('Resource ID'),
+        'user_id': fields.many2one('res.users', 'User', ondelete='cascade'),
+
+
+        #TODO add in company module
+        #'company_id': fields.many2one('res.company', 'Company')
+    }
+    _defaults = {
+        'key': lambda *a: 'action',
+        'key2': lambda *a: 'tree_but_open',
+        #'company_id': lambda *a: False,
+    }
+
+    def _auto_init(self, cursor):
+        super(Values, self)._auto_init(cursor)
+        cursor.execute('SELECT indexname FROM pg_indexes ' \
+                'WHERE indexname = \'ir_values_key_model_key2_index\'')
+        if not cursor.fetchone():
+            cursor.execute('CREATE INDEX ir_values_key_model_key2_index ' \
+                    'ON ir_values (key, model, key2)')
+            cursor.commit()
+
+    def set(self, cursor, user, key, key2, name, models, value, replace=True,
+            isobject=False, meta=False, preserve_user=False, company=False):
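+        # Non-object values are pickled before storage; with replace=True
+        # the matching entries are deleted first so each key keeps a
+        # single value.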
+        if type(value)==type(u''):
+            value = value.encode('utf8')
+        if not isobject:
+            value = pickle.dumps(value)
+        if meta:
+            meta = pickle.dumps(meta)
+        ids_res = []
+        for model in models:
+            if type(model)==type([]) or type(model)==type(()):
+                model, res_id = model
+            else:
+                res_id = False
+            if replace:
+                if key in ('meta', 'default'):
+                    ids = self.search(cursor, user, [
+                        ('key', '=', key),
+                        ('key2', '=', key2),
+                        ('name', '=', name),
+                        ('model', '=', model),
+                        ('res_id', '=', res_id),
+                        ('user_id', '=', preserve_user and user)
+                        ])
+                else:
+                    ids = self.search(cursor, user, [
+                        ('key', '=', key),
+                        ('key2', '=', key2),
+                        ('value', '=', value),
+                        ('model', '=', model),
+                        ('res_id', '=', res_id),
+                        ('user_id', '=', preserve_user and user)
+                        ])
+                self.unlink(cursor, user, ids)
+            vals = {
+                'name': name,
+                'value': value,
+                'model': model,
+                'object': isobject,
+                'key': key,
+                'key2': key2 and key2[:200],
+                'meta': meta,
+                'user_id': preserve_user and user,
+            }
+            if company:
+                cid = self.pool.get('res.users').browse(cursor, user, user,
+                        context={}).company_id.id
+                vals['company_id'] = cid
+            if res_id:
+                vals['res_id'] = res_id
+            ids_res.append(self.create(cursor, user, vals))
+        return ids_res
+
+    def get(self, cursor, user, key, key2, models, meta=False, context=None,
+            res_id_req=False, without_user=True, key2_req=True):
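+        # Collect matches from the most specific to the most generic: run
+        # the query with all optional clauses, then drop them one by one,
+        # finishing with the user-independent form.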
+        result = []
+        for model in models:
+            if type(model)==type([]) or type(model)==type(()):
+                model, res_id = model
+            else:
+                res_id = False
+            where1 = ['key = %s', 'model = %s']
+            where2 = [key, str(model)]
+            where_opt = []
+            if key2:
+                where1.append('key2 = %s')
+                where2.append(key2[:200])
+            else:
+                dest = where1
+                if not key2_req or meta:
+                    dest = where_opt
+                dest.append('key2 IS NULL')
+
+            if res_id_req and (models[-1][0]==model):
+                if res_id:
+                    where1.append('res_id = %d' % (res_id,))
+                else:
+                    where1.append('(res_id IS NULL)')
+            elif res_id:
+                if (models[-1][0]==model):
+                    where1.append('(res_id = %d OR (res_id IS NULL))' % (res_id,))
+                    where_opt.append('res_id = %d' % (res_id,))
+                else:
+                    where1.append('res_id = %d' % (res_id,))
+
+#            if not without_user:
+            where_opt.append('user_id = %d' % (user,))
+
+
+            result = []
+            test = True
+            while test:
+                if not where_opt:
+                    cursor.execute('SELECT id FROM ir_values ' \
+                            'WHERE ' + ' AND '.join(where1) + ' ' \
+                                'AND user_id IS NULL', where2)
+                else:
+                    cursor.execute('SELECT id FROM ir_values ' \
+                            'WHERE ' + ' AND '.join(where1 + where_opt),
+                            where2)
+                result.extend([x[0] for x in cursor.fetchall()])
+                if len(where_opt):
+                    where_opt.pop()
+                else:
+                    test = False
+
+            if result:
+                break
+
+        if not result:
+            return []
+        cid = self.pool.get('res.users').browse(cursor, user, user,
+                context={}).company_id.id
+        cursor.execute('SELECT id, name, value, object, meta, key ' \
+                'FROM ir_values ' \
+                'WHERE id IN (' + ','.join([str(x) for x in result])+') ' \
+                    'AND (company_id IS NULL OR company_id = %d) '\
+                'ORDER BY user_id', (cid,))
+        result = cursor.fetchall()
+
+        def _result_get(i, keys):
+            if i[1] in keys:
+                return False
+            keys.append(i[1])
+            if i[3]:
+                model, obj_id = i[2].split(',')
+                try:
+                    obj_id = int(obj_id)
+                except:
+                    return False
+                datas = self.pool.get(model).read(cursor, user, [obj_id], False,
+                        context=context)
+                if not len(datas):
+                    #ir_del(cursor, user, i[0])
+                    return False
+                def clean(j):
+                    for key in (
+                            'report_sxw_content',
+                            'report_rml_content',
+                            'report_sxw', 'report_rml',
+                            'report_sxw_content_data',
+                            'report_rml_content_data',
+                            ):
+                        if key in j:
+                            del j[key]
+                    return j
+                datas = clean(datas[0])
+            else:
+                datas = pickle.loads(i[2])
+            if meta:
+                meta2 = pickle.loads(i[4])
+                return (i[0], i[1], datas, meta2)
+            return (i[0], i[1], datas)
+        keys = []
+        res = filter(bool, map(lambda x: _result_get(x, keys), list(result)))
+        return res
+
+Values()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/module.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,312 @@
+import os, sys, imp
+import itertools
+from sets import Set
+from config import CONFIG
+import tools
+import pooler
+from netsvc import Logger, LOG_ERROR, LOG_INFO
+import zipfile
+
+OPJ = os.path.join
+ADDONS_PATH = OPJ(os.path.dirname(__file__), 'addons')
+sys.path.insert(1, ADDONS_PATH)
+
+class Graph(dict):
+
+    def add_node(self, name, deps):
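+        # Attach the node below its deepest dependency so that iterating
+        # the graph by depth yields modules in dependency order.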
+        max_depth, father = 0, None
+        for i in [Node(x, self) for x in deps]:
+            if i.depth >= max_depth:
+                father = i
+                max_depth = i.depth
+        if father:
+            father.add_child(name)
+        else:
+            Node(name, self)
+
+    def __iter__(self):
+        level = 0
+        done = Set(self.keys())
+        while done:
+            level_modules = [(name, module) for name, module in self.items() \
+                    if module.depth==level]
+            for name, module in level_modules:
+                done.remove(name)
+                yield module
+            level += 1
+
+
+class Singleton(object):
+
+    def __new__(cls, name, graph):
+        if name in graph:
+            inst = graph[name]
+        else:
+            inst = object.__new__(cls)
+            graph[name] = inst
+        return inst
+
+
+class Node(Singleton):
+
+    def __init__(self, name, graph):
+        super(Node, self).__init__()
+        self.name = name
+        self.graph = graph
+        self.datas = None
+        if not hasattr(self, 'childs'):
+            self.childs = []
+        if not hasattr(self, 'depth'):
+            self.depth = 0
+
+    def add_child(self, name):
+        node = Node(name, self.graph)
+        node.depth = self.depth + 1
+        if node not in self.childs:
+            self.childs.append(node)
+        for attr in ('init', 'update', 'demo'):
+            if hasattr(self, attr):
+                setattr(node, attr, True)
+        self.childs.sort(lambda x, y: cmp(x.name, y.name))
+
+    def has_child(self, name):
+        return Node(name, self.graph) in self.childs or \
+                bool([c for c in self.childs if c.has_child(name)])
+
+    def __setattr__(self, name, value):
+        super(Node, self).__setattr__(name, value)
+        if name in ('init', 'update', 'demo'):
+            CONFIG[name][self.name] = 1
+            for child in self.childs:
+                setattr(child, name, value)
+        if name == 'depth':
+            for child in self.childs:
+                setattr(child, name, value + 1)
+
+    def __iter__(self):
+        return itertools.chain(iter(self.childs),
+                *[iter(x) for x in self.childs])
+
+    def __str__(self):
+        return self.pprint()
+
+    def pprint(self, depth=0):
+        res = '%s\n' % self.name
+        for child in self.childs:
+            res += '%s`-> %s' % ('    ' * depth, child.pprint(depth + 1))
+        return res
+
+def create_graph(module_list, force=None):
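+    # Build the module dependency graph from the __terp__.py descriptors:
+    # repeatedly add the packages whose dependencies are already in the
+    # graph; whatever remains at the end has unmet dependencies.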
+    if force is None:
+        force = []
+    graph = Graph()
+    packages = []
+
+    for module in module_list:
+        if module[-4:] == '.zip':
+            module = module[:-4]
+        terp_file = OPJ(ADDONS_PATH, module, '__terp__.py')
+        mod_path = OPJ(ADDONS_PATH, module)
+        if module in ('ir', 'res'):
+            root_path = os.path.dirname(__file__)
+            terp_file = OPJ(root_path, module, '__terp__.py')
+            mod_path = OPJ(root_path, module)
+        if os.path.isfile(terp_file) or zipfile.is_zipfile(mod_path+'.zip'):
+            try:
+                info = eval(tools.file_open(terp_file).read())
+            except:
+                Logger().notify_channel('init', LOG_ERROR,
+                        'addon:%s:eval file %s' % (module, terp_file))
+                raise
+            if info.get('installable', True):
+                packages.append((module, info.get('depends', []), info))
+
+    current, later = Set([x[0] for x in packages]), Set()
+    while packages and current > later:
+        package, deps, datas = packages[0]
+
+        # if all dependencies of 'package' are already in the graph,
+        # add 'package' in the graph
+        if reduce(lambda x, y: x and y in graph, deps, True):
+            if package not in current:
+                packages.pop(0)
+                continue
+            later.clear()
+            current.remove(package)
+            graph.add_node(package, deps)
+            node = Node(package, graph)
+            node.datas = datas
+            for kind in ('init', 'demo', 'update'):
+                if (package in CONFIG[kind]) \
+                        or ('all' in CONFIG[kind]) \
+                        or (kind in force):
+                    setattr(node, kind, True)
+        else:
+            later.add(package)
+            packages.append((package, deps, datas))
+        packages.pop(0)
+
+    for package in later:
+        Logger().notify_channel('init', LOG_ERROR,
+                'addon:%s:Unmet dependency' % package)
+    return graph
+
+def init_module_objects(cursor, module_name, obj_list):
+    Logger().notify_channel('init', LOG_INFO,
+            'addon:%s:creating or updating database tables' % module_name)
+    for obj in obj_list:
+        obj.auto_init(cursor)
+
+def load_module_graph(cursor, graph, **kwargs):
+    # **kwargs is passed directly to convert_xml_import
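+    # Walk the graph in dependency order: for each package, instantiate its
+    # objects in the pool, create or update their tables, then import the
+    # init/update data files (and demo data) declared in __terp__.py.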
+    package_todo = []
+    statusi = 0
+    for package in graph:
+        module = package.name
+        Logger().notify_channel('init', LOG_INFO, 'addon:%s' % module)
+        sys.stdout.flush()
+        pool = pooler.get_pool(cursor.dbname)
+        modules = pool.instanciate(module)
+        cursor.execute('SELECT state, demo FROM ir_module_module WHERE name = %s',
+                (module,))
+        (package_state, package_demo) = (cursor.rowcount and cursor.fetchone()) \
+                or ('uninstalled', False)
+        idref = {}
+        if hasattr(package, 'init') \
+                or hasattr(package, 'update') \
+                or (package_state in ('to install', 'to upgrade')):
+            init_module_objects(cursor, module, modules)
+            for kind in ('init', 'update'):
+                for filename in package.datas.get('%s_xml' % kind, []):
+                    mode = 'update'
+                    if hasattr(package, 'init') or package_state=='to install':
+                        mode = 'init'
+                    Logger().notify_channel('init', LOG_INFO,
+                            'addon:%s:loading %s' % (module, filename))
+                    ext = os.path.splitext(filename)[1]
+                    if ext == '.csv':
+                        tools.convert_csv_import(cursor, module,
+                                os.path.basename(filename),
+                                tools.file_open(OPJ(module, filename)).read(),
+                                idref, mode=mode)
+                    elif ext == '.sql':
+                        queries = tools.file_open(OPJ(module,
+                            filename)).read().split(';')
+                        for query in queries:
+                            new_query = ' '.join(query.split())
+                            if new_query:
+                                cursor.execute(new_query)
+                    else:
+                        tools.convert_xml_import(cursor, module,
+                                tools.file_open(OPJ(module, filename)).read(),
+                                idref, mode=mode, **kwargs)
+            if hasattr(package, 'demo') \
+                    or (package_demo and package_state != 'installed'):
+                for xml in package.datas.get('demo_xml', []):
+                    ext = os.path.splitext(xml)[1]
+                    Logger().notify_channel('init', LOG_INFO,
+                            'addon:%s:loading %s' % (module, xml))
+                    if ext == '.csv':
+                        tools.convert_csv_import(cursor, module,
+                                os.path.basename(xml),
+                                tools.file_open(OPJ(module, xml)).read(), idref,
+                                noupdate=True)
+                    else:
+                        tools.convert_xml_import(cursor, module,
+                                tools.file_open(OPJ(module, xml)).read(), idref,
+                                noupdate=True, **kwargs)
+                cursor.execute('UPDATE ir_module_module SET demo = %s ' \
+                        'WHERE name = %s', (True, package.name))
+            package_todo.append(package.name)
+            cursor.execute("UPDATE ir_module_module SET state = 'installed' " \
+                    "WHERE state IN ('to upgrade', 'to install') " \
+                        "AND name = %s", (package.name,))
+        cursor.commit()
+        statusi += 1
+
+    pool = pooler.get_pool(cursor.dbname)
+    pool.get('ir.model.data')._process_end(cursor, 1, package_todo)
+    cursor.commit()
+
+def register_classes():
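+    # Import every module found in the addons directory (plus the built-in
+    # 'ir' and 'res') in dependency order, so that their osv classes
+    # register themselves in the pool.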
+    module_list = os.listdir(ADDONS_PATH)
+    module_list.append('ir')
+    import ir
+    import res
+    for package in create_graph(module_list):
+        module = package.name
+        Logger().notify_channel('init', LOG_INFO,
+                'addon:%s:registering classes' % module)
+        sys.stdout.flush()
+
+        if module in ('ir', 'res'):
+            continue
+
+        if not os.path.isfile(OPJ(ADDONS_PATH, module+'.zip')):
+            # XXX must restrict to only addons paths
+            imp.load_module(module, *imp.find_module(module))
+        else:
+            import zipimport
+            mod_path = OPJ(ADDONS_PATH, module+'.zip')
+            try:
+                zimp = zipimport.zipimporter(mod_path)
+                zimp.load_module(module)
+            except zipimport.ZipImportError:
+                Logger().notify_channel('init', LOG_ERROR,
+                        'Couldn\'t find module %s' % module)
+
+def load_modules(database, force_demo=False, update_module=False):
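+    # Load every installed or pending module for this database in dependency
+    # order; on update, also process modules marked 'to remove' and restart
+    # the pool afterwards.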
+    cursor = database.cursor()
+    force = []
+    if force_demo:
+        force.append('demo')
+    if update_module:
+        cursor.execute("SELECT name FROM ir_module_module " \
+                "WHERE state IN ('installed', 'to install', " \
+                    "'to upgrade', 'to remove')")
+    else:
+        cursor.execute("SELECT name FROM ir_module_module " \
+                "WHERE state IN ('installed', 'to upgrade', 'to remove')")
+    module_list = [name for (name,) in cursor.fetchall()]
+    graph = create_graph(module_list, force)
+    report = tools.AssertionReport()
+    load_module_graph(cursor, graph, report=report)
+    if report.get_report():
+        Logger().notify_channel('init', LOG_INFO, 'assert:%s' % report)
+
+    for kind in ('init', 'demo', 'update'):
+        CONFIG[kind] = {}
+
+    if update_module:
+        cursor.execute("SELECT name FROM ir_module_module " \
+                "WHERE state IN ('to remove')")
+        for (mod_name,) in cursor.fetchall():
+            pool = pooler.get_pool(cursor.dbname)
+            cursor.execute('SELECT model,res_id FROM ir_model_data ' \
+                    'WHERE NOT noupdate AND module = %s ' \
+                    'ORDER BY id DESC', (mod_name,))
+            for rmod, rid in cursor.fetchall():
+                # TODO: improve this
+                # We cannot use the class_pool, as _table could be
+                # defined in __init__, and we cannot use the pool, as
+                # the module may not be loaded in the pool yet.
+                uid = 1
+                pool.get(rmod).unlink(cursor, uid, [rid])
+            cursor.commit()
+        # TODO: remove menus that have no action and no child menus
+        cursor.execute('DELETE FROM ir_ui_menu ' \
+                'WHERE (id NOT IN (' \
+                        'SELECT parent_id FROM ir_ui_menu ' \
+                            'WHERE parent_id IS NOT NULL)) ' \
+                    'AND (id NOT IN (' \
+                        'SELECT res_id FROM ir_values ' \
+                            'WHERE model = \'ir.ui.menu\')) ' \
+                    'AND (id NOT IN (' \
+                        'SELECT res_id FROM ir_model_data ' \
+                            'WHERE model = \'ir.ui.menu\'))')
+
+        cursor.execute("UPDATE ir_module_module SET state = %s " \
+                "WHERE state IN ('to remove')", ('uninstalled',))
+        cursor.commit()
+        pooler.restart_pool(cursor.dbname)
+    cursor.close()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/netsvc.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,453 @@
+import time
+import threading
+import SimpleXMLRPCServer, signal, sys, xmlrpclib
+import SocketServer
+import socket
+import logging
+import os
+import tiny_socket
+
+_SERVICE = {}
+_GROUP = {}
+_RES = {}
+
+LOG_DEBUG = 'debug'
+LOG_INFO = 'info'
+LOG_WARNING = 'warn'
+LOG_ERROR = 'error'
+LOG_CRITICAL = 'critical'
+
+from config import CONFIG
+
+
+class ServiceEndPointCall(object):
+
+    def __init__(self, id, method):
+        self._id = id
+        self._meth = method
+
+    def __call__(self, *args):
+        _RES[self._id] = self._meth(*args)
+        return self._id
+
+
+class ServiceEndPoint(object):
+
+    def __init__(self, name, id):
+        self._id = id
+        self._meth = {}
+        service = _SERVICE[name]
+        for method in service.method:
+            self._meth[method] = service.method[method]
+
+    def __getattr__(self, name):
+        return ServiceEndPointCall(self._id, self._meth[name])
+
+
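+# Each Service registers itself by name in the module-level _SERVICE
+# dictionary; LocalService(name) later looks the service up there and
+# exposes its exported methods as plain attributes.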
+class Service(object):
+    _serviceEndPointID = 0
+
+    def __init__(self, name=None):
+        if name is None:
+            return
+        _SERVICE[name] = self
+        self.__name = name
+        self.method = {}
+        self.exportedmethods = None
+        self._response_process = None
+        self._response_process_id = None
+        self.response = None
+
+    def join_group(self, name):
+        if name not in _GROUP:
+            _GROUP[name] = {}
+        _GROUP[name][self.__name] = self
+
+    def export_method(self, method):
+        if callable(method):
+            self.method[method.__name__] = method
+
+    def service_end_point(self, service):
+        if Service._serviceEndPointID >= 2**16:
+            Service._serviceEndPointID = 0
+        Service._serviceEndPointID += 1
+        return ServiceEndPoint(service, self._serviceEndPointID)
+
+    def conversation_id(self):
+        return 1
+
+    def process_response(self, service, id):
+        self._response_process, self._response_process_id = service, id
+
+    def process_failure(self, service, id):
+        pass
+
+    def resume_response(self, service):
+        pass
+
+    def cancel_response(self, service):
+        pass
+
+    def suspend_response(self, service):
+        if self._response_process:
+            self._response_process(self._response_process_id,
+                                   _RES[self._response_process_id])
+        self._response_process = None
+        self.response = service(self._response_process_id)
+
+    def abort_response(self, description, origin, details):
+        if not CONFIG['debug_mode']:
+            raise Exception("%s -- %s\n\n%s" % (origin, description, details))
+        else:
+            raise
+
+    def current_failure(self, service):
+        pass
+
+    def exec_workflow(self, database, user, passwd, object_name, method,
+            object_id):
+        return
+
+    def execute(self, database, user, passwd, object_name, method, *args):
+        return
+
+    def execute_cr(self, cursor, user, object_name, method, *args, **kargs):
+        return
+
+    def obj_list(self, database, user, passwd):
+        return
+
+    def create(self, cursor, user, ids, datas, context):
+        return
+
+    @staticmethod
+    def trg_validate(user, res_type, res_id, signal, cursor):
+        pass
+
+    @staticmethod
+    def trg_delete(user, res_type, res_id, cursor):
+        pass
+
+    @staticmethod
+    def trg_write(user, res_type, res_id, cursor):
+        pass
+
+    def trg_create(self, user, res_type, res_id, cursor):
+        pass
+
+class LocalService(Service):
+
+    def __init__(self, name):
+        super(LocalService, self).__init__()
+        self.__name = name
+        service = _SERVICE[name]
+        self.service = service
+        for method in service.method:
+            setattr(self, method, service.method[method])
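+
+# Minimal usage sketch of the service registry (hypothetical names):
+#
+#     def ping():
+#         return True
+#     service = Service('common')
+#     service.export_method(ping)
+#     LocalService('common').ping()  # -> True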
+
+
+class ServiceUnavailable(Exception):
+    pass
+
+def service_exist(name):
+    return (name in _SERVICE) and bool(_SERVICE[name])
+
+def init_logger():
+    if CONFIG['logfile']:
+        logf = CONFIG['logfile']
+        # test if the directories exist, else create them
+        try:
+            if not os.path.exists(os.path.dirname(logf)):
+                os.makedirs(os.path.dirname(logf))
+            try:
+                fd_log = open(logf, 'a')
+                handler = logging.StreamHandler(fd_log)
+            except IOError:
+                sys.stderr.write("ERROR: couldn't open the logfile\n")
+                handler = logging.StreamHandler(sys.stdout)
+        except OSError:
+            sys.stderr.write("ERROR: couldn't create the logfile directory\n")
+            handler = logging.StreamHandler(sys.stdout)
+    else:
+        handler = logging.StreamHandler(sys.stdout)
+
+    # create a format for log messages and dates
+    formatter = logging.Formatter(
+            '[%(asctime)s] %(levelname)s:%(name)s:%(message)s',
+            '%a %b %d %H:%M:%S %Y')
+
+    # tell the handler to use this format
+    handler.setFormatter(formatter)
+
+    # add the handler to the root logger
+    logging.getLogger().addHandler(handler)
+    logging.getLogger().setLevel(logging.INFO)
+
+
+class Logger(object):
+
+    def notify_channel(self, name, level, msg):
+        log = logging.getLogger(name)
+        getattr(log, level)(msg)
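+    # e.g. Logger().notify_channel('init', LOG_INFO, 'server started')
+    # logs "[<date>] INFO:init:server started" through the handler set up
+    # by init_logger().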
+
+
+class Agent(object):
+    _timers = []
+    _logger = Logger()
+
+    def set_alarm(self, function, time_start, args=None, kwargs=None):
+        if not args:
+            args = []
+        if not kwargs:
+            kwargs = {}
+        wait = time_start - time.time()
+        if wait > 0:
+            self._logger.notify_channel('timers', LOG_DEBUG,
+                    "Job scheduled in %s seconds for %s.%s" % \
+                            (wait, function.im_class.__name__,
+                                function.func_name))
+            timer = threading.Timer(wait, function, args, kwargs)
+            timer.start()
+            self._timers.append(timer)
+        for timer in self._timers[:]:
+            if not timer.isAlive():
+                self._timers.remove(timer)
+
+    @classmethod
+    def quit(cls):
+        for timer in cls._timers:
+            timer.cancel()
+
+
+class RpcGateway(object):
+
+    def __init__(self, name):
+        self.name = name
+
+
+class Dispatcher(object):
+
+    def __init__(self):
+        pass
+
+    def monitor(self, sig):
+        pass
+
+    def run(self):
+        pass
+
+
+class XmlRpc(object):
+
+    class RpcGateway(object):
+
+        def __init__(self, name):
+            self.name = name
+
+
+class GenericXMLRPCRequestHandler(object):
+
+    def __init__(self):
+        self.path = ''
+
+    def _dispatch(self, method, params):
+        import traceback
+        try:
+            name = self.path.split("/")[-1]
+            service = LocalService(name)
+            meth = getattr(service, method)
+            service.service.response = None
+            res = meth(*params)
+            res2 = service.service.response
+            if res2 is not None:
+                res = res2
+            return res
+        except Exception, exp:
+            tb_s = reduce(lambda x, y: x+y, traceback.format_exception(
+                sys.exc_type, sys.exc_value, sys.exc_traceback))
+            if CONFIG['debug_mode']:
+                import pdb
+                traceb = sys.exc_info()[2]
+                pdb.post_mortem(traceb)
+            raise xmlrpclib.Fault(str(exp), tb_s)
+
+
+class SimpleXMLRPCRequestHandler(GenericXMLRPCRequestHandler,
+        SimpleXMLRPCServer.SimpleXMLRPCRequestHandler):
+    SimpleXMLRPCServer.SimpleXMLRPCRequestHandler.rpc_paths = (
+            '/xmlrpc/db',
+            '/xmlrpc/common',
+            '/xmlrpc/object',
+            '/xmlrpc/report',
+            '/xmlrpc/wizard',
+            )
+
+
+class SimpleThreadedXMLRPCServer(SocketServer.ThreadingMixIn,
+        SimpleXMLRPCServer.SimpleXMLRPCServer):
+
+    def server_bind(self):
+        self.socket.setsockopt(socket.SOL_SOCKET,
+                socket.SO_REUSEADDR, 1)
+        SimpleXMLRPCServer.SimpleXMLRPCServer.server_bind(self)
+
+
+class HttpDaemon(threading.Thread):
+
+    def __init__(self, interface, port, secure=False):
+        threading.Thread.__init__(self)
+        self.__port = port
+        self.__interface = interface
+        self.secure = secure
+        self.running = False
+        if secure:
+#            from ssl import SecureXMLRPCServer
+#            class SecureXMLRPCRequestHandler(GenericXMLRPCRequestHandler,
+#                    SecureXMLRPCServer.SecureXMLRPCRequestHandler):
+#                SecureXMLRPCServer.SecureXMLRPCRequestHandler.rpc_paths = (
+#                        '/xmlrpc/db',
+#                        '/xmlrpc/common',
+#                        '/xmlrpc/object',
+#                        '/xmlrpc/report',
+#                        '/xmlrpc/wizard',
+#                        )
+#            class SecureThreadedXMLRPCServer(SocketServer.ThreadingMixIn,
+#                    SecureXMLRPCServer.SecureXMLRPCServer):
+#
+#                def server_bind(self):
+#                    self.socket.setsockopt(socket.SOL_SOCKET,
+#                            socket.SO_REUSEADDR, 1)
+#                    SecureXMLRPCServer.SecureXMLRPCServer.server_bind(self)
+#
+#            self.server = SecureThreadedXMLRPCServer((interface, port),
+#                    SecureXMLRPCRequestHandler,0)
+            raise Exception('secure mode is not supported')
+        else:
+            self.server = SimpleThreadedXMLRPCServer((interface, port),
+                    SimpleXMLRPCRequestHandler,0)
+
+    def attach(self, path, gateway):
+        pass
+
+    def stop(self):
+        self.running = False
+        if os.name != 'nt':
+            if hasattr(socket, 'SHUT_RDWR'):
+                if self.secure:
+                    self.server.socket.sock_shutdown(socket.SHUT_RDWR)
+                else:
+                    self.server.socket.shutdown(socket.SHUT_RDWR)
+            else:
+                if self.secure:
+                    self.server.socket.sock_shutdown(2)
+                else:
+                    self.server.socket.shutdown(2)
+        self.server.socket.close()
+
+    def run(self):
+        self.server.register_introspection_functions()
+
+        self.running = True
+        while self.running:
+            self.server.handle_request()
+        return True
+
+        # If the server needs to be run recursively
+        #
+        #signal.signal(signal.SIGALRM, self.my_handler)
+        #signal.alarm(6)
+        #while True:
+        #    self.server.handle_request()
+        #signal.alarm(0)          # Disable the alarm
+
+
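+# One thread per tiny_socket client connection: receive a single (service,
+# method, *args) message, dispatch it through LocalService, send back the
+# result (or the exception with its traceback), then close the socket.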
+class TinySocketClientThread(threading.Thread):
+    def __init__(self, sock, threads):
+        threading.Thread.__init__(self)
+        self.sock = sock
+        self.threads = threads
+        self.running = False
+
+    def run(self):
+        import traceback
+        self.running = True
+        try:
+            tsock = tiny_socket.MySocket(self.sock)
+        except:
+            self.sock.close()
+            self.threads.remove(self)
+            return False
+        while self.running:
+            try:
+                msg = tsock.myreceive()
+            except:
+                self.sock.close()
+                self.threads.remove(self)
+                return False
+            try:
+                service = LocalService(msg[0])
+                method = getattr(service, msg[1])
+                service.service.response = None
+                res = method(*msg[2:])
+                res2 = service.service.response
+                if res2 is not None:
+                    res = res2
+                tsock.mysend(res)
+            except Exception, exp:
+                tb_s = reduce(lambda x, y: x+y,
+                        traceback.format_exception(sys.exc_type,
+                            sys.exc_value, sys.exc_traceback))
+                if CONFIG['debug_mode']:
+                    import pdb
+                    tback = sys.exc_info()[2]
+                    pdb.post_mortem(tback)
+                tsock.mysend(exp, exception=True, traceback=tb_s)
+            except:
+                pass
+            self.sock.close()
+            self.threads.remove(self)
+            return True
+
+    def stop(self):
+        self.running = False
+
+
+class TinySocketServerThread(threading.Thread):
+    def __init__(self, interface, port, secure=False):
+        threading.Thread.__init__(self)
+        self.__port = port
+        self.__interface = interface
+        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+        self.socket.bind((self.__interface, self.__port))
+        self.socket.listen(5)
+        self.threads = []
+        self.secure = secure
+        self.running = False
+
+    def run(self):
+        try:
+            self.running = True
+            while self.running:
+                (clientsocket, address) = self.socket.accept()
+                c_thread = TinySocketClientThread(clientsocket, self.threads)
+                self.threads.append(c_thread)
+                c_thread.start()
+            self.socket.close()
+        except Exception, exp:
+            self.socket.close()
+            return False
+
+    def stop(self):
+        self.running = False
+        for thread in self.threads:
+            thread.stop()
+        try:
+            if hasattr(socket, 'SHUT_RDWR'):
+                self.socket.shutdown(socket.SHUT_RDWR)
+            else:
+                self.socket.shutdown(2)
+            self.socket.close()
+        except:
+            return False
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/osv/__init__.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,1 @@
+from osv import *
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/osv/fields.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,633 @@
+# -*- encoding: iso-8859-1 -*-
+"""
+ Fields:
+      - simple
+      - relations (one2many, many2one, many2many)
+      - function
+
+ Field attributes:
+   _classic_read: whether this is a classic sql field
+   _type   : field type
+   readonly
+   required
+   size
+
+ Relational fields
+
+ Values: (0, 0,  { fields })    create
+         (1, ID, { fields })    modification
+         (2, ID)                remove (delete)
+         (3, ID)                unlink one (target id or target of relation)
+         (4, ID)                link
+         (5, ID)                unlink all (only valid for one2many)
+         (6, ?, ids)            set a list of links
+"""
+
+import psycopg
+import warnings
+import __builtin__
+
+def _symbol_set(symb):
+    if symb == None or symb == False:
+        return None
+    elif isinstance(symb, unicode):
+        return symb.encode('utf-8')
+    return str(symb)
+
+class _column(object):
+    _classic_read = True
+    _classic_write = True
+    _properties = False
+    _type = 'unknown'
+    _obj = None
+    _symbol_c = '%s'
+    _symbol_f = _symbol_set
+    _symbol_set = (_symbol_c, _symbol_f)
+    _symbol_get = None
+
+    def __init__(self, string='unknown', required=False, readonly=False,
+            domain=None, context='', states=None, priority=0,
+            change_default=False, size=None, ondelete="set null",
+            translate=False, select=False, **args):
+        self.states = states or {}
+        self.string = string
+        self.readonly = readonly
+        self.required = required
+        self.size = size
+        self.help = args.get('help', '')
+        self.priority = priority
+        self.change_default = change_default
+        self.ondelete = ondelete
+        self.translate = translate
+        self._domain = domain or []
+        self._context = context
+        self.group_name = False
+        self.view_load = 0
+        self.select = select
+        for i in args:
+            if args[i]:
+                setattr(self, i, args[i])
+
+    def restart(self):
+        pass
+
+    def set(self, cursor, obj, obj_id, name, value, user=None, context=None):
+        raise Exception, 'undefined set method!'
+
+    def get(self, cursor, obj, ids, name, user=None, offset=0, context=None,
+            values=None):
+        raise Exception, 'undefined get method!'
+
+
+class Boolean(_column):
+    _type = 'boolean'
+    _symbol_c = '%s'
+    _symbol_f = lambda x: x and 'True' or 'False'
+    _symbol_set = (_symbol_c, _symbol_f)
+
+boolean = Boolean
+
+
+class Integer(_column):
+    _type = 'integer'
+    _symbol_c = '%d'
+    _symbol_f = lambda x: int(x or 0)
+    _symbol_set = (_symbol_c, _symbol_f)
+
+integer = Integer
+
+
+class Reference(_column):
+    _type = 'reference'
+
+    def __init__(self, string, selection, size, **args):
+        _column.__init__(self, string=string, size=size, selection=selection,
+                **args)
+
+reference = Reference
+
+
+class Char(_column):
+    _type = 'char'
+
+    def __init__(self, string, size, **args):
+        _column.__init__(self, string=string, size=size, **args)
+        self._symbol_set = (self._symbol_c, self._symbol_set_char)
+
+    @staticmethod
+    def _symbol_set_char(symb):
+        """
+        takes a string (encoded in utf8)
+        and returns a string (encoded in utf8)
+        """
+        #TODO we need to remove the "symb==False" from the next line BUT
+        #TODO for now too many things rely on this broken behavior
+        #TODO the symb==None test should be common to all data types
+        if symb == None or symb == False:
+            return None
+
+        # we need to convert the string to a unicode object to be able
+        # to evaluate its length (and possibly truncate it) reliably
+        if isinstance(symb, str):
+            u_symb = unicode(symb, 'utf8')
+        elif isinstance(symb, unicode):
+            u_symb = symb
+        else:
+            u_symb = unicode(symb)
+        return u_symb.encode('utf8')
+
+char = Char
+
+
+class Text(_column):
+    _type = 'text'
+
+text = Text
+
+
+class Float(_column):
+    _type = 'float'
+    _symbol_c = '%f'
+    _symbol_f = lambda x: __builtin__.float(x or 0.0)
+    _symbol_set = (_symbol_c, _symbol_f)
+
+    def __init__(self, string='unknown', digits=None, **args):
+        _column.__init__(self, string=string, **args)
+        self.digits = digits
+
+float = Float
+
+
+class Date(_column):
+    _type = 'date'
+
+date = Date
+
+
+class DateTime(_column):
+    _type = 'datetime'
+
+datetime = DateTime
+
+
+class Time(_column):
+    _type = 'time'
+
+time = Time
+
+
+class Binary(_column):
+    _type = 'binary'
+    _symbol_c = '%s'
+    _symbol_f = lambda symb: symb and psycopg.Binary(symb) or None
+    _symbol_set = (_symbol_c, _symbol_f)
+
+binary = Binary
+
+
+class Selection(_column):
+    _type = 'selection'
+
+    def __init__(self, selections, string='unknown', **args):
+        _column.__init__(self, string=string, **args)
+        self.selection = selections
+
+selection = Selection
+
+
+class One2One(_column):
+    _classic_read = False
+    _classic_write = True
+    _type = 'one2one'
+
+    def __init__(self, obj, string='unknown', **args):
+        warnings.warn("The one2one field doesn't work anymore",
+                DeprecationWarning)
+        _column.__init__(self, string=string, **args)
+        self._obj = obj
+
+    def set(self, cursor, obj_src, src_id, field, act, user=None, context=None):
+        if context is None:
+            context = {}
+        obj = obj_src.pool.get(self._obj)
+        if act[0] == 0:
+            id_new = obj.create(cursor, user, act[1])
+            cursor.execute('UPDATE "' + obj_src._table + '" ' \
+                    'SET "' + field + '" = %d ' \
+                    'WHERE id = %d', (id_new, src_id))
+        else:
+            cursor.execute('SELECT "' + field + '" ' \
+                    'FROM "' + obj_src._table + '" ' \
+                    'WHERE id = %d', (act[0],))
+            (obj_id,) = cursor.fetchone()
+            obj.write(cursor, user, [obj_id] , act[1], context=context)
+
+one2one = One2One
+
+
+class Many2One(_column):
+    _classic_read = False
+    _classic_write = True
+    _type = 'many2one'
+
+    def __init__(self, obj, string='unknown', **args):
+        _column.__init__(self, string=string, **args)
+        self._obj = obj
+
+    # TODO: speed improvement
+    # name is the name of the relation field
+    def get(self, cursor, obj, ids, name, user=None, offset=0, context=None,
+            values=None):
+        if context is None:
+            context = {}
+        if values is None:
+            values = {}
+        res = {}
+        for i in values:
+            res[i['id']] = i[name]
+        for i in ids:
+            res.setdefault(i, '')
+        obj = obj.pool.get(self._obj)
+        names = obj.name_get(cursor, user, res.values(), context=context)
+
+        for i in res.keys():
+            if res[i] and res[i] in names:
+                res[i] = (res[i], names[res[i]])
+            else:
+                res[i] = False
+        return res
+
+    def set(self, cursor, obj_src, obj_id, field, values, user=None,
+            context=None):
+        if context is None:
+            context = {}
+        obj = obj_src.pool.get(self._obj)
+        table = obj_src.pool.get(self._obj)._table
+        if type(values) == type([]):
+            for act in values:
+                if act[0] == 0:
+                    id_new = obj.create(cursor, user, act[2])
+                    cursor.execute('UPDATE "' + obj_src._table + '" ' \
+                            'SET "' + field + '" = %d ' \
+                            'WHERE id = %d', (id_new, obj_id))
+                elif act[0] == 1:
+                    obj.write(cursor, user, [act[1]], act[2], context=context)
+                elif act[0] == 2:
+                    cursor.execute('DELETE FROM "' + table + '" ' \
+                            'WHERE id = %d', (act[1],))
+                elif act[0] == 3 or act[0] == 5:
+                    cursor.execute('UPDATE "' + obj_src._table + '" ' \
+                            'SET "' + field + '" = NULL ' \
+                            'WHERE id = %d', (obj_id,))
+                elif act[0] == 4:
+                    cursor.execute('UPDATE "' + obj_src._table + '" ' \
+                            'SET "' + field + '" = %d ' \
+                            'WHERE id = %d', (act[1], obj_id))
+        else:
+            if values:
+                cursor.execute('UPDATE "' + obj_src._table + '" ' \
+                        'SET "' + field + '" = %d ' \
+                        'WHERE id = %d', (values, obj_id))
+            else:
+                cursor.execute('UPDATE "' + obj_src._table + '" ' \
+                        'SET "' + field + '" = NULL ' \
+                        'WHERE id = %d', (obj_id,))
+
+many2one = Many2One
+
+
+class One2Many(_column):
+    _classic_read = False
+    _classic_write = False
+    _type = 'one2many'
+
+    def __init__(self, obj, fields_id, string='unknown', limit=None, **args):
+        _column.__init__(self, string=string, **args)
+        self._obj = obj
+        self._fields_id = fields_id
+        self._limit = limit
+        #one2many can't be used as condition for defaults
+        assert(self.change_default != True)
+
+    def get(self, cursor, obj, ids, name, user=None, offset=0, context=None,
+            values=None):
+        if context is None:
+            context = {}
+        if values is None:
+            values = {}
+        res = {}
+        for i in ids:
+            res[i] = []
+        ids2 = obj.pool.get(self._obj).search(cursor, user,
+                [(self._fields_id, 'in', ids)], offset=offset,
+                limit=self._limit)
+        for i in obj.pool.get(self._obj)._read_flat(cursor, user, ids2,
+                [self._fields_id], context=context, load='_classic_write'):
+            res[i[self._fields_id]].append( i['id'] )
+        return res
+
+    def set(self, cursor, obj, obj_id, field, values, user=None, context=None):
+        if context is None:
+            context = {}
+        if not values:
+            return
+        _table = obj.pool.get(self._obj)._table
+        obj = obj.pool.get(self._obj)
+        for act in values:
+            if act[0] == 0:
+                act[2][self._fields_id] = obj_id
+                obj.create(cursor, user, act[2], context=context)
+            elif act[0] == 1:
+                obj.write(cursor, user, [act[1]] , act[2], context=context)
+            elif act[0] == 2:
+                obj.unlink(cursor, user, [act[1]], context=context)
+            elif act[0] == 3:
+                cursor.execute('UPDATE "' + _table + '" ' \
+                        'SET "' + self._fields_id + '" = NULL ' \
+                        'WHERE id = %d', (act[1],))
+            elif act[0] == 4:
+                cursor.execute('UPDATE "' + _table + '" ' \
+                        'SET "' + self._fields_id + '" = %d ' \
+                        'WHERE id = %d', (obj_id, act[1]))
+            elif act[0] == 5:
+                cursor.execute('UPDATE "' + _table + '" ' \
+                        'SET "' + self._fields_id + '" = NULL ' \
+                        'WHERE "' + self._fields_id + '" = %d', (obj_id,))
+            elif act[0] == 6:
+                if not act[2]:
+                    ids2 = [0]
+                else:
+                    ids2 = act[2]
+                cursor.execute('UPDATE "' + _table + '" ' \
+                        'SET "' + self._fields_id + '" = NULL ' \
+                        'WHERE "' + self._fields_id + '" = %d ' \
+                            'AND id not IN (' + \
+                                ','.join([str(x) for x in ids2]) + ')',
+                                (obj_id,))
+                if act[2]:
+                    cursor.execute('UPDATE "' + _table + '" ' \
+                            'SET "' + self._fields_id + '" = %d ' \
+                            'WHERE id IN (' + \
+                                ','.join([str(x) for x in act[2]]) + ')',
+                                (obj_id,))
+
+one2many = One2Many
+
+
+class Many2Many(_column):
+    _classic_read = False
+    _classic_write = False
+    _type = 'many2many'
+
+    def __init__(self, obj, rel, id1, id2, string='unknown', limit=None,
+            **args):
+        _column.__init__(self, string=string, **args)
+        self._obj = obj
+        self._rel = rel
+        self._id1 = id1
+        self._id2 = id2
+        self._limit = limit
+
+    def get(self, cursor, obj, ids, name, user=None, offset=0, context=None,
+            values=None):
+        if context is None:
+            context = {}
+        if values is None:
+            values = {}
+        res = {}
+        if not ids:
+            return res
+        for i in ids:
+            res[i] = []
+        ids_s = ','.join([str(x) for x in ids])
+        limit_str = self._limit is not None and ' limit %d' % self._limit or ''
+        obj = obj.pool.get(self._obj)
+
+        domain1, domain2 = obj.pool.get('ir.rule').domain_get(cursor,
+                user, obj._name)
+        if domain1:
+            domain1 = ' and '+domain1
+
+        cursor.execute('SELECT ' + self._rel + '.' + self._id2 + ', ' + \
+                    self._rel + '.' + self._id1 + ' ' \
+                'FROM "' + self._rel + '" , "' + obj._table + '" ' \
+                'WHERE ' + \
+                    self._rel + '.' + self._id1 + ' IN (' + ids_s + ') ' \
+                    'AND ' + self._rel + '.' + self._id2 + ' = ' + \
+                        obj._table + '.id ' + domain1 + \
+                limit_str + ' ORDER BY ' + obj._table + '.' + obj._order + \
+                ' offset %d', domain2 + [offset])
+        for i in cursor.fetchall():
+            res[i[1]].append(i[0])
+        return res
+
+    def set(self, cursor, obj, obj_id, name, values, user=None, context=None):
+        if context is None:
+            context = {}
+        if not values:
+            return
+        obj = obj.pool.get(self._obj)
+        for act in values:
+            if act[0] == 0:
+                idnew = obj.create(cursor, user, act[2])
+                cursor.execute('INSERT INTO "' + self._rel + '" ' \
+                        '(' + self._id1 + ', ' + self._id2 + ') ' \
+                        'VALUES (%d, %d)', (obj_id, idnew))
+            elif act[0] == 1:
+                obj.write(cursor, user, [act[1]] , act[2], context=context)
+            elif act[0] == 2:
+                obj.unlink(cursor, user, [act[1]], context=context)
+            elif act[0] == 3:
+                cursor.execute('DELETE FROM "' + self._rel + '" ' \
+                        'WHERE "' + self._id1 + '" = %d ' \
+                            'AND "'+ self._id2 + '" = %d', (obj_id, act[1]))
+            elif act[0] == 4:
+                cursor.execute('INSERT INTO "' + self._rel + '" ' \
+                        '(' + self._id1 + ', ' + self._id2 + ') ' \
+                        'VALUES (%d, %d)', (obj_id, act[1]))
+            elif act[0] == 5:
+                cursor.execute('UPDATE "' + self._rel + '" ' \
+                        'SET "' + self._id2 + '" = NULL ' \
+                        'WHERE "' + self._id2 + '" = %d', (obj_id,))
+            elif act[0] == 6:
+                domain1, domain2 = obj.pool.get('ir.rule').domain_get(cursor,
+                        user, obj._name)
+                if domain1:
+                    domain1 = ' AND ' + domain1
+                cursor.execute('DELETE FROM "' + self._rel + '" ' \
+                        'WHERE "' + self._id1 + '" = %d ' \
+                            'AND "' + self._id2 + '" IN (' \
+                            'SELECT ' + self._rel + '.' + self._id2 + ' ' \
+                            'FROM "' + self._rel + '", "' + obj._table + '" ' \
+                            'WHERE ' + self._rel + '.' + self._id1 + ' = %d ' \
+                                'AND ' + self._rel + '.' + self._id2 + ' = ' + \
+                                obj._table + '.id ' + domain1 + ')',
+                                [obj_id, obj_id] + domain2)
+
+                for act_nbr in act[2]:
+                    cursor.execute('INSERT INTO "' + self._rel + '" ' \
+                            '(' + self._id1 + ', ' + self._id2 + ') ' \
+                            'VALUES (%d, %d)', (obj_id, act_nbr))
+
+many2many = Many2Many
+
+
+class Function(_column):
+    _classic_read = False
+    _classic_write = False
+    _type = 'function'
+    _properties = True
+
+    def __init__(self, fnct, arg=None, fnct_inv=None, fnct_inv_arg=None,
+            type='float', fnct_search=None, obj=None, method=False,
+            store=False, **args):
+        _column.__init__(self, **args)
+        self._obj = obj
+        self._method = method
+        self._fnct = fnct
+        self._fnct_inv = fnct_inv
+        self._arg = arg
+        if 'relation' in args:
+            self._obj = args['relation']
+        self._fnct_inv_arg = fnct_inv_arg
+        if not fnct_inv:
+            self.readonly = 1
+        self._type = type
+        self._fnct_search = fnct_search
+        self.store = store
+        if type == 'float':
+            self._symbol_c = '%f'
+            self._symbol_f = lambda x: __builtin__.float(x or 0.0)
+            self._symbol_set = (self._symbol_c, self._symbol_f)
+
+    def search(self, cursor, uid, obj, name, args):
+        if not self._fnct_search:
+            return []
+        return self._fnct_search(obj, cursor, uid, obj, name, args)
+
+    def get(self, cursor, obj, ids, name, user=None, offset=0, context=None,
+            values=None):
+        if context is None:
+            context = {}
+        if values is None:
+            values = {}
+        res = {}
+        table = obj._table
+        if self._method:
+            # TODO get HAS to receive uid for permissions !
+            return self._fnct(obj, cursor, user, ids, name, self._arg, context)
+        else:
+            return self._fnct(cursor, table, ids, name, self._arg, context)
+
+    def set(self, cursor, obj, obj_id, name, value, user=None, context=None):
+        if context is None:
+            context = {}
+        if self._fnct_inv:
+            self._fnct_inv(obj, cursor, user, obj_id, name, value,
+                    self._fnct_inv_arg, context)
+
+function = Function
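+
+# Typical declaration of a function field (an illustrative sketch; the
+# method and field names are hypothetical):
+#
+#     def _amount(self, cursor, user, ids, name, arg, context):
+#         return dict([(obj_id, 0.0) for obj_id in ids])
+#
+#     _columns = {
+#         'amount': fields.function(_amount, method=True, type='float',
+#             string='Amount'),
+#     }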
+
+
+class Serialized(_column):
+    def __init__(self, string='unknown', serialize_func=repr,
+            deserialize_func=eval, type='text', **args):
+        self._serialize_func = serialize_func
+        self._deserialize_func = deserialize_func
+        self._type = type
+        self._symbol_set = (self._symbol_c, self._serialize_func)
+        self._symbol_get = self._deserialize_func
+        super(Serialized, self).__init__(string=string, **args)
+
+serialized = Serialized
+
+
+class Property(function):
+
+    def _fnct_write(self, obj, cursor, user, obj_id, prop, id_val, val,
+            context=None):
+        if context is None:
+            context = {}
+        (obj_dest,) = val
+        definition_id = self._field_get(cursor, user, obj._name, prop)
+
+        property = obj.pool.get('ir.property')
+        nid = property.search(cursor, user, [('fields_id', '=', definition_id),
+            ('res_id', '=', obj._name + ',' + str(obj_id))])
+        # TODO remove query from while statement
+        while len(nid):
+            cursor.execute('DELETE FROM ir_property WHERE id = %d',
+                    (nid.pop(),))
+
+        nid = property.search(cursor, user, [('fields_id', '=', definition_id),
+            ('res_id', '=', False)])
+        default_val = False
+        if nid:
+            default_val = property.browse(cursor, user, nid[0], context).value
+
+        company_id = obj.pool.get('res.users').company_get(cursor, user, user)
+        res = False
+        newval = (id_val and obj_dest + ',' + str(id_val)) or False
+        if (newval != default_val) and newval:
+            propdef = obj.pool.get('ir.model.fields').browse(cursor, user,
+                    definition_id, context=context)
+            res = property.create(cursor, user, {
+                'name': propdef.name,
+                'value': newval,
+                'res_id': obj._name + ',' + str(obj_id),
+                'company_id': company_id,
+                'fields_id': definition_id,
+            }, context=context)
+        return res
+
+    def _fnct_read(self, obj, cursor, user, ids, prop, val, context=None):
+        if context is None:
+            context = {}
+        property = obj.pool.get('ir.property')
+        definition_id = self._field_get(cursor, user, obj._name, prop)
+
+        nid = property.search(cursor, user, [('fields_id', '=', definition_id),
+            ('res_id', '=', False)])
+        default_val = False
+        if nid:
+            value = property.browse(cursor, user, nid[0], context).value
+            default_val = (value and int(value.split(',')[1])) or False
+
+        vids = [obj._name + ',' + str(obj_id) for obj_id in ids]
+        nids = property.search(cursor, user, [('fields_id', '=', definition_id),
+            ('res_id', 'in', vids)])
+
+        res = {}
+        for obj_id in ids:
+            res[obj_id] = default_val
+        for prop in property.browse(cursor, user, nids):
+            res[int(prop.res_id.split(',')[1])] = (prop.value and \
+                    int(prop.value.split(',')[1])) or False
+
+        obj = obj.pool.get(self._obj)
+        names = obj.name_get(cursor, user, res.values(), context=context)
+        for i in res.keys():
+            if res[i] and res[i] in names:
+                res[i] = (res[i], names[res[i]])
+            else:
+                res[i] = False
+        return res
+
+    def _field_get(self, cursor, user, model_name, prop):
+        if not self.field_id.get(cursor.dbname):
+            cursor.execute('SELECT id ' \
+                    'FROM ir_model_fields ' \
+                    'WHERE name = %s AND model = %s', (prop, model_name))
+            res = cursor.fetchone()
+            self.field_id[cursor.dbname] = res and res[0]
+        return self.field_id[cursor.dbname]
+
+    def __init__(self, obj_prop, **args):
+        self.field_id = {}
+        function.__init__(self, self._fnct_read, False, self._fnct_write,
+                (obj_prop, ), **args)
+
+    def restart(self):
+        self.field_id = {}
+
+property = Property
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/osv/orm.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,2272 @@
+# -*- coding: utf-8 -*-
+
+from xml import dom
+from trytond.netsvc import Logger, LOG_ERROR, LOG_WARNING, LocalService
+import fields
+
+def intersect(i, j):
+    return [x for x in j if x in i]
+
+
+class ExceptORM(Exception):
+
+    def __init__(self, name, value):
+        Exception.__init__(self)
+        self.name = name
+        self.value = value
+        self.args = (name, value)
+
+except_orm = ExceptORM
+
+
+class BrowseNull(object):
+    "Readonly python database object browser"
+
+    def __init__(self):
+        self._id = False
+
+    @staticmethod
+    def __getitem__(name):
+        return False
+
+    @staticmethod
+    def __int__():
+        return False
+
+    @staticmethod
+    def __str__():
+        return ''
+
+    @staticmethod
+    def __nonzero__():
+        return False
+
+browse_null = BrowseNull
+
+
+# TODO: execute an object method on BrowseRecordList
+class BrowseRecordList(list):
+
+    def __init__(self, lst, context=None):
+        if not context:
+            context = {}
+        super(BrowseRecordList, self).__init__(lst)
+        self.context = context
+
+browse_record_list = BrowseRecordList
+
+
+class BrowseRecord(object):
+
+    def __init__(self, cursor, user, object_id, table, cache, context=None,
+            list_class=None):
+        '''
+        table : the object (inherited from orm)
+        context : a dictionary with an optional context
+        '''
+        if context is None:
+            context = {}
+        assert object_id, 'Wrong ID for the browse record, got ' + \
+                str(object_id) + ', expected an integer.'
+        self._list_class = list_class or BrowseRecordList
+        self._cursor = cursor
+        self._user = user
+        self._id = object_id
+        self._table = table
+        self._table_name = self._table._name
+        self._context = context
+
+        cache.setdefault(table._name, {})
+        self._data = cache[table._name]
+        if object_id not in self._data:
+            self._data[object_id] = {'id': object_id}
+        self._cache = cache
+
+    def __getitem__(self, name):
+        if name == 'id':
+            return self._id
+        if not self._data[self._id].has_key(name):
+            # build the list of fields we will fetch
+
+            # fetch the definition of the field which was asked for
+            if name in self._table._columns:
+                col = self._table._columns[name]
+            elif name in self._table._inherit_fields:
+                col = self._table._inherit_fields[name][2]
+            elif hasattr(self._table, name):
+                return getattr(self._table, name)
+            else:
+                logger = Logger()
+                logger.notify_channel('orm', LOG_ERROR,
+                        "Programming error: field '%s' " \
+                                "does not exist in object '%s'!" % \
+                                (name, self._table._name))
+                return False
+
+            # if the field is a classic one or a many2one,
+            # we'll fetch all classic and many2one fields
+            if col._classic_write:
+                # gen the list of "local" (ie not inherited)
+                # fields which are classic or many2one
+                ffields = [x for x in self._table._columns.items() \
+                        if x[1]._classic_write]
+                # gen the list of inherited fields
+                inherits = [(x[0], x[1][2]) for x in \
+                        self._table._inherit_fields.items()]
+                # complete the field list with the inherited fields
+                # which are classic or many2one
+                ffields += [x for x in inherits if x[1]._classic_write]
+            # otherwise we fetch only that field
+            else:
+                ffields = [(name, col)]
+            ids = [x for x in self._data.keys() \
+                    if not self._data[x].has_key(name)]
+            # read the data
+            datas = self._table.read(self._cursor, self._user, ids,
+                    [x[0] for x in ffields], context=self._context,
+                    load="_classic_write")
+
+            # create browse records for 'remote' objects
+            for data in datas:
+                for i, j in ffields:
+                    if j._type in ('many2one', 'one2one'):
+                        if data[i]:
+                            obj = self._table.pool.get(j._obj)
+                            if not j._classic_write:
+                                ids2 = data[i][0]
+                            else:
+                                ids2 = data[i]
+                            data[i] = BrowseRecord(self._cursor, self._user,
+                                    ids2, obj, self._cache,
+                                    context=self._context,
+                                    list_class=self._list_class)
+                        else:
+                            data[i] = BrowseNull()
+                    elif j._type in ('one2many', 'many2many') and len(data[i]):
+                        data[i] = self._list_class([BrowseRecord(self._cursor,
+                            self._user, x, self._table.pool.get(j._obj),
+                            self._cache, context=self._context,
+                            list_class=self._list_class) for x in data[i]],
+                            self._context)
+                self._data[data['id']].update(data)
+        return self._data[self._id][name]
+
+    def __getattr__(self, name):
+        # TODO raise an AttributeError exception
+        return self[name]
+
+    def __contains__(self, name):
+        return (name in self._table._columns) \
+                or (name in self._table._inherit_fields) \
+                or hasattr(self._table, name)
+
+    def __hasattr__(self, name):
+        return name in self
+
+    def __int__(self):
+        return self._id
+
+    def __str__(self):
+        return "BrowseRecord(%s, %d)" % (self._table_name, self._id)
+
+    def __eq__(self, other):
+        return (self._table_name, self._id) == (other._table_name, other._id)
+
+    def __ne__(self, other):
+        return (self._table_name, self._id) != (other._table_name, other._id)
+
+    # we need to define __unicode__ even though we've already defined __str__
+    # because we have overridden __getattr__
+    def __unicode__(self):
+        return unicode(str(self))
+
+    def __hash__(self):
+        return hash((self._table_name, self._id))
+
+    __repr__ = __str__
+
+browse_record = BrowseRecord
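+
+# Attribute access on a BrowseRecord is lazy: the first access to a classic
+# or many2one column triggers a single read() that also prefetches the other
+# classic and many2one columns for every id already in the cache; relational
+# values come back wrapped in BrowseRecord / BrowseRecordList objects.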
+
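+# get_pg_type maps a field object to its postgres column type, e.g.
+# fields.char('Name', size=64) gives ('varchar', 'VARCHAR(64)') and
+# fields.float('Amount', digits=(16, 2)) gives ('numeric', 'NUMERIC(16, 2)').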
+def get_pg_type(field):
+    '''
+    returns a tuple
+    (type returned by postgres when the column was created,
+    type expression to create the column)
+    '''
+    type_dict = {
+            fields.boolean:'bool',
+            fields.integer:'int4',
+            fields.text:'text',
+            fields.date:'date',
+            fields.time:'time',
+            fields.datetime:'timestamp',
+            fields.binary:'bytea',
+            fields.many2one:'int4',
+            }
+
+    if type_dict.has_key(type(field)):
+        f_type = (type_dict[type(field)], type_dict[type(field)])
+    elif isinstance(field, fields.float):
+        if field.digits:
+            f_type = ('numeric', 'NUMERIC(%d, %d)' % \
+                    (field.digits[0],field.digits[1]))
+        else:
+            f_type = ('float8', 'DOUBLE PRECISION')
+    elif isinstance(field, (fields.char, fields.reference)):
+        f_type = ('varchar', 'VARCHAR(%d)' % (field.size,))
+    elif isinstance(field, fields.selection):
+        if isinstance(field.selection, list) \
+                and isinstance(field.selection[0][0], (str, unicode)):
+            f_size = reduce(lambda x, y: max(x, len(y[0])), field.selection,
+                    field.size or 16)
+        elif isinstance(field.selection, list) \
+                and isinstance(field.selection[0][0], int):
+            f_size = -1
+        else:
+            f_size = (hasattr(field,'size') and field.size) or 16
+
+        if f_size == -1:
+            f_type = ('int4', 'INTEGER')
+        else:
+            f_type = ('varchar', 'VARCHAR(%d)' % f_size)
+    elif isinstance(field, fields.function) \
+            and type_dict.has_key(getattr(fields, field._type)):
+        ftype = getattr(fields, field._type)
+        f_type = (type_dict[ftype], type_dict[ftype])
+    elif isinstance(field, fields.function) and field._type == 'float':
+        f_type = ('float8', 'DOUBLE PRECISION')
+    else:
+        logger = Logger()
+        logger.notify_channel("init", LOG_WARNING,
+                '%s type not supported!' % (type(field)))
+        f_type = None
+    return f_type
+
+
+class ORM(object):
+    """
+    Object relational mapping to the postgresql module
+       . Hierarchical structure
+       . Constraint consistency and validations
+       . Object metadata depending on its status
+       . Optimised processing by complex queries (multiple actions at once)
+       . Default field values
+       . Permissions optimisation
+       . Persistent object: postgresql DB
+       . Data conversions
+       . Multi-level caching system
+       . Two different inheritance mechanisms
+       . Fields:
+            - classic (varchar, integer, boolean, ...)
+            - relations (one2many, many2one, many2many)
+            - functions
+    """
+    _columns = {}
+    _sql_constraints = []
+    _constraints = []
+    _defaults = {}
+    _log_access = True
+    _table = None
+    _name = None
+    _rec_name = 'name'
+    _parent_name = 'parent_id'
+    _date_name = 'date'
+    _order = 'id'
+    _inherits = {}
+    _inherit = None
+    _sequence = None
+    _description = __doc__
+    _protected = [
+            'read',
+            'write',
+            'create',
+            'default_get',
+            'unlink',
+            'fields_get',
+            'fields_view_get',
+            'search',
+            'name_get',
+            'name_search',
+            'copy',
+            'import_data',
+            'search_count',
+            ]
+    _auto = True
+    _obj = None
+    _sql = ''
+    _inherit_fields = []
+    pool = None
+
+    def _field_create(self, cursor):
+        cursor.execute("SELECT id FROM ir_model WHERE model='%s'" % self._name)
+        if not cursor.rowcount:
+            # reference model in order to have a description
+            # of its functionality in custom_report
+            cursor.execute("INSERT INTO ir_model " \
+                    "(model, name, info) VALUES (%s, %s, %s)",
+                    (self._name, self._description, self.__doc__))
+        cursor.commit()
+
+        for k in self._columns:
+            field = self._columns[k]
+            cursor.execute("SELECT id, relate FROM ir_model_fields " \
+                    "WHERE model = %s AND name = %s", (self._name, k))
+            if not cursor.rowcount:
+                cursor.execute("SELECT id FROM ir_model WHERE model = %s",
+                        (self._name,))
+                (model_id,) = cursor.fetchone()
+                cursor.execute("INSERT INTO ir_model_fields " \
+                        "(model_id, model, name, field_description, ttype, " \
+                            "relation, group_name, view_load) " \
+                        "VALUES (%d, %s, %s, %s, %s, %s, %s, %s)",
+                        (model_id, self._name, k,
+                            field.string.replace("'", " "), field._type,
+                            field._obj or 'NULL', field.group_name or '',
+                            (field.view_load and 'True') or 'False'))
+        cursor.commit()
+
+    def auto_init(self, cursor):
+        self.init(cursor)
+        self._auto_init(cursor)
+
+    def init(self, cursor):
+        pass
+
+    def _auto_init(self, cursor):
+        logger = Logger()
+        create = False
+        self._field_create(cursor)
+        if self._auto:
+            cursor.execute("SELECT relname FROM pg_class " \
+                    "WHERE relkind in ('r', 'v') AND relname = %s",
+                    (self._table,))
+            if not cursor.rowcount:
+                cursor.execute("CREATE TABLE \"%s\" " \
+                        "(id SERIAL NOT NULL, " \
+                            "PRIMARY KEY(id)) WITH OIDS" % self._table)
+                create = True
+            cursor.commit()
+            if self._log_access:
+                logs = {
+                    'create_uid': 'INTEGER REFERENCES res_users ' \
+                            'ON DELETE SET NULL',
+                    'create_date': 'TIMESTAMP',
+                    'write_uid': 'INTEGER REFERENCES res_users ' \
+                            'ON DELETE SET NULL',
+                    'write_date': 'TIMESTAMP'
+                }
+                for k in logs:
+                    cursor.execute("SELECT c.relname " \
+                        "FROM pg_class c, pg_attribute a " \
+                        "WHERE c.relname = %s " \
+                            "AND a.attname = %s " \
+                            "AND c.oid = a.attrelid",
+                            (self._table, k))
+                    if not cursor.rowcount:
+                        cursor.execute("ALTER TABLE \"%s\" " \
+                                "ADD COLUMN \"%s\" %s" %
+                            (self._table, k, logs[k]))
+                        cursor.commit()
+
+            # iterate on the database columns to drop the NOT NULL constraints
+            # of fields which were required but have been removed
+            cursor.execute(
+                "SELECT a.attname, a.attnotnull "\
+                "FROM pg_class c, pg_attribute a "\
+                "WHERE c.oid = a.attrelid AND c.relname = %s",
+                (self._table,))
+            db_columns = cursor.dictfetchall()
+            for column in db_columns:
+                if column['attname'] not in (
+                        'id',
+                        'oid',
+                        'tableoid',
+                        'ctid',
+                        'xmin',
+                        'xmax',
+                        'cmin',
+                        'cmax',
+                        ):
+                    if column['attnotnull'] \
+                            and (column['attname'] not in self._columns):
+                        cursor.execute("ALTER TABLE \"%s\" " \
+                                "ALTER COLUMN \"%s\" DROP NOT NULL" % \
+                                (self._table, column['attname']))
+
+            # iterate on the "object columns"
+            for k in self._columns:
+                if k in (
+                        'id',
+                        'write_uid',
+                        'write_date',
+                        'create_uid',
+                        'create_date',
+                        ):
+                    continue
+
+                field = self._columns[k]
+                if isinstance(field, fields.one2many):
+                    cursor.execute("SELECT relname FROM pg_class " \
+                            "WHERE relkind = 'r' AND relname = %s",
+                            (field._obj,))
+                    if cursor.fetchone():
+                        cursor.execute("SELECT count(*) as c " \
+                                "FROM pg_class c, pg_attribute a " \
+                                "WHERE c.relname = %s " \
+                                    "AND a.attname = %s " \
+                                    "AND c.oid = a.attrelid",
+                                    (field._obj, field._fields_id))
+                        (res,) = cursor.fetchone()
+                        if not res:
+                            # the foreign key lives on the one2many target
+                            # table and points back at this model's table
+                            cursor.execute("ALTER TABLE \"%s\" " \
+                                    "ADD FOREIGN KEY (%s) " \
+                                    "REFERENCES \"%s\" ON DELETE SET NULL" % \
+                                    (field._obj, field._fields_id,
+                                        self._table))
+                elif isinstance(field, fields.many2many):
+                    cursor.execute("SELECT relname FROM pg_class " \
+                            "WHERE relkind in ('r','v') AND relname=%s",
+                            (field._rel,))
+                    if not cursor.dictfetchall():
+                        #FIXME: Remove this try/except
+                        try:
+                            ref = self.pool.get(field._obj)._table
+                        except AttributeError:
+                            ref = field._obj.replace('.','_')
+                        cursor.execute("CREATE TABLE \"%s\" " \
+                                "(\"%s\" INTEGER NOT NULL REFERENCES \"%s\" " \
+                                    "ON DELETE CASCADE, " \
+                                "\"%s\" INTEGER NOT NULL REFERENCES \"%s\" " \
+                                    "ON DELETE CASCADE) WITH OIDS" % \
+                                    (field._rel, field._id1, self._table,
+                                        field._id2, ref))
+                        cursor.execute("CREATE INDEX \"%s_%s_index\" " \
+                                "ON \"%s\" (\"%s\")" % \
+                                (field._rel, field._id1, field._rel,
+                                    field._id1))
+                        cursor.execute("CREATE INDEX \"%s_%s_index\" " \
+                                "ON \"%s\" (\"%s\")" % \
+                                (field._rel, field._id2, field._rel,
+                                    field._id2))
+                        cursor.commit()
+                else:
+                    cursor.execute("SELECT c.relname, a.attname, a.attlen, " \
+                                "a.atttypmod, a.attnotnull, a.atthasdef, " \
+                                "t.typname, " \
+                                    "CASE WHEN a.attlen = -1 " \
+                                    "THEN a.atttypmod-4 " \
+                                    "ELSE a.attlen END as size " \
+                            "FROM pg_class c, pg_attribute a, pg_type t " \
+                            "WHERE c.relname = %s " \
+                                "AND a.attname = %s " \
+                                "AND c.oid = a.attrelid " \
+                                "AND a.atttypid = t.oid",
+                                (self._table, k.lower()))
+                    res = cursor.dictfetchall()
+                    if not res:
+                        if not isinstance(field, fields.function) \
+                                or field.store:
+                            # add the missing field
+                            cursor.execute("ALTER TABLE \"%s\" " \
+                                    "ADD COLUMN \"%s\" %s" % \
+                                    (self._table, k, get_pg_type(field)[1]))
+                            # initialize it
+                            if not create and k in self._defaults:
+                                default = self._defaults[k](self, cursor, 1, {})
+                                if not default:
+                                    cursor.execute("UPDATE \"%s\" " \
+                                            "SET \"%s\" = NULL" % \
+                                            (self._table, k))
+                                else:
+                                    cursor.execute("UPDATE \"%s\" " \
+                                            "SET \"%s\" = '%s'" % \
+                                            (self._table, k, default))
+                            # and add constraints if needed
+                            if isinstance(field, fields.many2one):
+                                #FIXME: Remove this try/except
+                                try:
+                                    ref = self.pool.get(field._obj)._table
+                                except AttributeError:
+                                    ref = field._obj.replace('.','_')
+                                # ir_actions is inherited so foreign
+                                # key doesn't work on it
+                                if ref != 'ir_actions':
+                                    cursor.execute("ALTER TABLE \"%s\" " \
+                                            "ADD FOREIGN KEY (\"%s\") " \
+                                                "REFERENCES \"%s\" " \
+                                                "ON DELETE %s" % \
+                                            (self._table, k, ref,
+                                                field.ondelete))
+                            if field.select:
+                                cursor.execute("CREATE INDEX \"%s_%s_index\" " \
+                                        "ON \"%s\" (\"%s\")" % \
+                                        (self._table, k, self._table, k))
+                            if field.required:
+                                try:
+                                    cursor.execute("ALTER TABLE \"%s\" " \
+                                            "ALTER COLUMN \"%s\" " \
+                                                "SET NOT NULL" % \
+                                                (self._table, k))
+                                except:
+                                    logger.notify_channel('init',
+                                            LOG_WARNING,
+                                            'Unable to set column %s ' \
+                                                    'of table %s not null !\n'\
+                                            'Try to re-run: ' \
+                                        'tinyerp-server.py --update=module\n' \
+                'If it doesn\'t work, update records and execute manually:\n' \
+                'ALTER TABLE %s ALTER COLUMN %s SET NOT NULL' % \
+                                        (k, self._table, self._table, k))
+                            cursor.commit()
+                    elif len(res)==1:
+                        f_pg_def = res[0]
+                        f_pg_type = f_pg_def['typname']
+                        f_pg_size = f_pg_def['size']
+                        f_pg_notnull = f_pg_def['attnotnull']
+                        if isinstance(field, fields.function) \
+                                and not field.store:
+                            logger.notify_channel('init', LOG_WARNING,
+                                    'column %s (%s) in table %s was converted '\
+                                            'to a function !\n' \
+                        'You should remove this column from your database.' % \
+                                (k, field.string, self._table))
+                            f_obj_type = None
+                        else:
+                            f_obj_type = get_pg_type(field) \
+                                    and get_pg_type(field)[0]
+                        if f_obj_type:
+                            if f_pg_type != f_obj_type:
+                                logger.notify_channel('init',
+                                        LOG_WARNING,
+                                        "column '%s' in table '%s' has " \
+                                        "changed type (DB = %s, def = %s) !" % \
+                                        (k, self._table, f_pg_type,
+                                            field._type))
+                            if f_pg_type == 'varchar' \
+                                    and field._type == 'char' \
+                                    and f_pg_size != field.size:
+                                # columns with the name 'type' cannot be changed
+                                # for an unknown reason?!
+                                if k != 'type':
+                                    if f_pg_size > field.size:
+                                        logger.notify_channel('init',
+                                                LOG_WARNING,
+                                                "column '%s' in table '%s' " \
+        "has changed size (DB = %d, def = %d), strings will be truncated !" % \
+                                        (k, self._table, f_pg_size, field.size))
+#TODO: check whether existing data would be a problem (SELECT char_length())
+#TODO: issue a log message even if f_pg_size < field.size
+                                    cursor.execute("ALTER TABLE \"%s\" " \
+                                            "RENAME COLUMN \"%s\" " \
+                                            "TO temp_change_size" % \
+                                            (self._table,k))
+                                    cursor.execute("ALTER TABLE \"%s\" " \
+                                            "ADD COLUMN \"%s\" VARCHAR(%d)" % \
+                                            (self._table,k,field.size))
+                                    cursor.execute("UPDATE \"%s\" " \
+                                "SET \"%s\" = temp_change_size::VARCHAR(%d)" % \
+                                        (self._table, k, field.size))
+                                    cursor.execute("ALTER TABLE \"%s\" " \
+                                            "DROP COLUMN temp_change_size" % \
+                                            (self._table,))
+                                    cursor.commit()
+                            # if the field is required
+                            # and hasn't got a NOT NULL constraint
+                            if field.required and f_pg_notnull == 0:
+                                # set the field to the default value if any
+                                if self._defaults.has_key(k):
+                                    default = self._defaults[k](self, cursor,
+                                            1, {})
+                                    if not (default is False):
+                                        cursor.execute("UPDATE \"%s\" " \
+                                        "SET \"%s\" = '%s' WHERE %s is NULL" % \
+                                            (self._table, k, default, k))
+                                        cursor.commit()
+                                # add the NOT NULL constraint
+                                try:
+                                    cursor.execute("ALTER TABLE \"%s\" " \
+                                        "ALTER COLUMN \"%s\" SET NOT NULL" % \
+                                        (self._table, k))
+                                    cursor.commit()
+                                except:
+                                    logger.notify_channel('init',
+                                            LOG_WARNING,
+                                            'unable to set ' \
+                    'a NOT NULL constraint on column %s of the %s table !\n' \
+'If you want to have it, you should update the records and execute manually:\n'\
+                            'ALTER TABLE %s ALTER COLUMN %s SET NOT NULL' % \
+                                        (k, self._table, self._table, k))
+                                cursor.commit()
+                            elif not field.required and f_pg_notnull == 1:
+                                cursor.execute("ALTER TABLE \"%s\" " \
+                                        "ALTER COLUMN \"%s\" DROP NOT NULL" % \
+                                        (self._table, k))
+                                cursor.commit()
+                            cursor.execute("SELECT indexname FROM pg_indexes " \
+                    "WHERE indexname = '%s_%s_index' AND tablename = '%s'" % \
+                                    (self._table, k, self._table))
+                            res = cursor.dictfetchall()
+                            if not res and field.select:
+                                cursor.execute("CREATE INDEX \"%s_%s_index\" " \
+                                        "ON \"%s\" (\"%s\")" % \
+                                        (self._table, k, self._table, k))
+                                cursor.commit()
+                            if res and not field.select:
+                                cursor.execute("DROP INDEX \"%s_%s_index\"" % \
+                                        (self._table, k))
+                                cursor.commit()
+                    else:
+                        # TODO add error message
+                        logger.notify_channel('init', LOG_ERROR, '')
+        else:
+            cursor.execute("SELECT relname FROM pg_class " \
+                    "WHERE relkind in ('r', 'v') AND relname = %s",
+                    (self._table,))
+            create = not bool(cursor.fetchone())
+
+        for (key, con, _) in self._sql_constraints:
+            cursor.execute("SELECT conname FROM pg_constraint " \
+                    "WHERE conname = %s", ((self._table + '_' + key),))
+            if not cursor.dictfetchall():
+                try:
+                    cursor.execute('ALTER TABLE \"%s\" ' \
+                            'ADD CONSTRAINT \"%s_%s\" %s' % \
+                            (self._table, self._table, key, con,))
+                    cursor.commit()
+                except:
+                    logger.notify_channel('init', LOG_WARNING,
+                            'unable to add \'%s\' constraint on table %s !\n' \
+'If you want to have it, you should update the records and execute manually:\n'\
+                            'ALTER TABLE %s ADD CONSTRAINT %s_%s %s' % \
+                        (con, self._table, self._table, self._table, key, con))
+
+        if create:
+            if hasattr(self, "_sql"):
+                for line in self._sql.split(';'):
+                    line2 = line.replace('\n', '').strip()
+                    if line2:
+                        cursor.execute(line2)
+                        cursor.commit()
+
+    def __init__(self):
+        if not self._table:
+            self._table = self._name.replace('.', '_')
+        if not self._description:
+            self._description = self._name
+        for (key, ham, msg) in self._sql_constraints:
+            self.pool._sql_error[self._table + '_' + key] = msg
+
+        self._inherits_reload()
+        if not self._sequence:
+            self._sequence = self._table+'_id_seq'
+        for k in self._defaults:
+            assert (k in self._columns) or (k in self._inherit_fields), \
+            'Default function defined in %s but field %s does not exist!' % \
+                (self._name, k,)
+        for field in self._columns:
+            self._columns[field].restart()
+        # FIXME: does not work at all
+#        if self._log_access:
+#            self._columns.update({
+#                'create_uid': fields.many2one('res.users', 'Creation user',
+#                       required=True, readonly=True),
+#                'create_date': fields.datetime('Creation date', required=True,
+#                       readonly=True),
+#                'write_uid': fields.many2one('res.users',
+#                       'Last modification by', readonly=True),
+#                'write_date': fields.datetime('Last modification date',
+#                       readonly=True),
+#                })
+#             self._defaults.update({
+#                 'create_uid': lambda self, cursor, user, context: user,
+#                 'create_date': lambda *a: time.strftime("%Y-%m-%d %H:%M:%S")
+#                 })
+
+    def _inherits_reload_src(self):
+        "Update objects that uses this one to update their _inherits fields"
+        for obj in self.pool.object_name_pool.values():
+            if self._name in obj._inherits:
+                obj._inherits_reload()
+
+    def _inherits_reload(self):
+        res = {}
+        for table in self._inherits:
+            res.update(self.pool.get(table)._inherit_fields)
+            for col in self.pool.get(table)._columns.keys():
+                res[col] = (table, self._inherits[table],
+                        self.pool.get(table)._columns[col])
+            for col in self.pool.get(table)._inherit_fields.keys():
+                res[col] = (table, self._inherits[table],
+                        self.pool.get(table)._inherit_fields[col][2])
+        self._inherit_fields = res
+        self._inherits_reload_src()
+
+    def browse(self, cursor, user, select, context=None, list_class=None):
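+        # Usage sketch (hypothetical ids): a single id yields a single
+        # BrowseRecord, a list of ids yields a BrowseRecordList:
+        #     record = self.browse(cursor, user, 1)
+        #     records = self.browse(cursor, user, [1, 2, 3])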
+        if context is None:
+            context = {}
+        list_class = list_class or BrowseRecordList
+        cache = {}
+        # need to accept ints and longs because ids coming from a method
+        # launched by a button in the interface are of type long...
+        if isinstance(select, (int, long)):
+            return BrowseRecord(cursor, user, select, self, cache,
+                    context=context, list_class=list_class)
+        elif isinstance(select, list):
+            return list_class([BrowseRecord(cursor, user, x, self, cache,
+                context=context, list_class=list_class) for x in select],
+                context)
+        else:
+            # XXX raise exception?
+            return BrowseNull()
+
+    def __export_row(self, cursor, user, row, fields_names, context=None):
+        lines = []
+        data = ['' for x in range(len(fields_names))]
+        done = []
+        for fpos in range(len(fields_names)):
+            field = fields_names[fpos]
+            if field:
+                row2 = row
+                i = 0
+                while i < len(field):
+                    row2 = row2[field[i]]
+                    if not row2:
+                        break
+                    if isinstance(row2, (BrowseRecordList, list)):
+                        first = True
+                        fields2 = [(x[:i+1]==field[:i+1] and x[i+1:]) \
+                                or [] for x in fields_names]
+                        if fields2 in done:
+                            break
+                        done.append(fields2)
+                        for row2 in row2:
+                            lines2 = self.__export_row(cursor, user, row2,
+                                    fields2, context)
+                            if first:
+                                for fpos2 in range(len(fields_names)):
+                                    if lines2 and lines2[0][fpos2]:
+                                        data[fpos2] = lines2[0][fpos2]
+                                lines += lines2[1:]
+                                first = False
+                            else:
+                                lines += lines2
+                        break
+                    i += 1
+                if i == len(field):
+                    data[fpos] = str(row2 or '')
+        return [data] + lines
+
+    def export_data(self, cursor, user, ids, fields_names, context=None):
+        if context is None:
+            context = {}
+        fields_names = [x.split('/') for x in fields_names]
+        datas = []
+        for row in self.browse(cursor, user, ids, context):
+            datas += self.__export_row(cursor, user, row, fields_names, context)
+        return datas
+
+    # TODO: Send a request with the result and multi-thread !
+    def import_data(self, cursor, user, fields_names, datas, mode='init',
+            current_module=None, noupdate=False, context=None):
+        if context is None:
+            context = {}
+        fields_names = [x.split('/') for x in fields_names]
+        logger = Logger()
+
+        def process_liness(self, datas, prefix, fields_def, position=0):
+            line = datas[position]
+            row = {}
+            translate = {}
+            todo = []
+            warning = ''
+            data_id = False
+
+            # Import normal fields_names
+            for i in range(len(fields_names)):
+                if i >= len(line):
+                    raise Exception, \
+                        'Please check that all your lines have %d columns.' % \
+                            (len(fields_names),)
+                field = fields_names[i]
+                if field == ["id"]:
+                    data_id = line[i]
+                    continue
+                if (len(field) == len(prefix) + 1) \
+                        and field[len(prefix)].endswith(':id'):
+                    res_id = False
+                    if line[i]:
+                        if fields_def[field[len(prefix)][:-3]]['type'] \
+                                == 'many2many':
+                            res_id = []
+                            for word in line[i].split(','):
+                                if '.' in word:
+                                    module, xml_id = word.rsplit('.', 1)
+                                else:
+                                    module, xml_id = current_module, word
+                                ir_model_data_obj = \
+                                        self.pool.get('ir.model.data')
+                                new_id = ir_model_data_obj._get_id(cursor,
+                                        user, module, xml_id)
+                                res_id2 = ir_model_data_obj.read(cursor, user,
+                                        [new_id], ['res_id'])[0]['res_id']
+                                if res_id2:
+                                    res_id.append(res_id2)
+                            if len(res_id):
+                                res_id = [(6, 0, res_id)]
+                        else:
+                            if '.' in line[i]:
+                                module, xml_id = line[i].rsplit('.', 1)
+                            else:
+                                module, xml_id = current_module, line[i]
+                            ir_model_data_obj = self.pool.get('ir.model.data')
+                            new_id = ir_model_data_obj._get_id(cursor, user,
+                                    module, xml_id)
+                            res_id = ir_model_data_obj.read(cursor, user,
+                                    [new_id], ['res_id'])[0]['res_id']
+                    row[field[len(prefix)][:-3]] = res_id or False
+                    continue
+                if (len(field) == len(prefix)+1) and \
+                        len(field[len(prefix)].split(':lang=')) == 2:
+                    field, lang = field[len(prefix)].split(':lang=')
+                    translate.setdefault(lang, {})[field] = line[i] or False
+                    continue
+                if (len(field) == len(prefix)+1) and \
+                        (prefix == field[0:len(prefix)]):
+                    if fields_def[field[len(prefix)]]['type'] == 'integer':
+                        res = line[i] and int(line[i])
+                    elif fields_def[field[len(prefix)]]['type'] == 'float':
+                        res = line[i] and float(line[i])
+                    elif fields_def[field[len(prefix)]]['type'] == 'selection':
+                        res = False
+                        if isinstance(
+                                fields_def[field[len(prefix)]]['selection'],
+                                (tuple, list)):
+                            sel = fields_def[field[len(prefix)]]['selection']
+                        else:
+                            sel = fields_def[field[len(prefix)]]['selection'](
+                                    self, cursor, user, context)
+                        for key, val in sel:
+                            if str(key) == line[i]:
+                                res = key
+                        if line[i] and not res:
+                            logger.notify_channel("import", LOG_WARNING,
+                                    "key '%s' not found " \
+                                            "in selection field '%s'" % \
+                                            (line[i], field[len(prefix)]))
+                    elif fields_def[field[len(prefix)]]['type'] == 'many2one':
+                        res = False
+                        if line[i]:
+                            relation = \
+                                    fields_def[field[len(prefix)]]['relation']
+                            res2 = self.pool.get(relation).name_search(cursor,
+                                    user, line[i], [], operator='=')
+                            res = (res2 and res2[0][0]) or False
+                            if not res:
+                                warning += ('Relation not found: ' + line[i] + \
+                                        ' on ' + relation + ' !\n')
+                                logger.notify_channel("import",
+                                        LOG_WARNING,
+                                        'Relation not found: ' + line[i] + \
+                                                ' on ' + relation + ' !\n')
+                    elif fields_def[field[len(prefix)]]['type'] == 'many2many':
+                        res = []
+                        if line[i]:
+                            relation = \
+                                    fields_def[field[len(prefix)]]['relation']
+                            for word in line[i].split(','):
+                                res2 = self.pool.get(relation).name_search(
+                                        cursor, user, word, [], operator='=')
+                                res3 = (res2 and res2[0][0]) or False
+                                if not res3:
+                                    warning += ('Relation not found: ' + \
+                                            line[i] + ' on '+relation + ' !\n')
+                                    logger.notify_channel("import",
+                                            LOG_WARNING,
+                                            'Relation not found: ' + line[i] + \
+                                                    ' on '+relation + ' !\n')
+                                else:
+                                    res.append(res3)
+                            if len(res):
+                                res = [(6, 0, res)]
+                    else:
+                        res = line[i] or False
+                    row[field[len(prefix)]] = res
+                elif prefix == field[0:len(prefix)]:
+                    if field[len(prefix)] not in todo:
+                        todo.append(field[len(prefix)])
+
+            # Import one2many fields
+            nbrmax = 1
+            for field in todo:
+                newfd = self.pool.get(fields_def[field]['relation']).fields_get(
+                        cursor, user, context=context)
+                res = process_liness(self, datas, prefix + [field], newfd,
+                        position)
+                (newrow, max2, warning2, translate2, data_id2) = res
+                nbrmax = max(nbrmax, max2)
+                warning = warning + warning2
+                row[field] = (reduce(lambda x, y: x or y, newrow.values()) and \
+                        [(0, 0, newrow)]) or []
+                i = max2
+                while (position+i)<len(datas):
+                    test = True
+                    for j in range(len(fields_names)):
+                        field2 = fields_names[j]
+                        if (len(field2) <= (len(prefix)+1)) \
+                                and datas[position+i][j]:
+                            test = False
+                    if not test:
+                        break
+
+                    (newrow, max2, warning2, translate2, data_id2) = \
+                            process_liness(self, datas, prefix+[field], newfd,
+                                    position + i)
+                    warning = warning + warning2
+                    if reduce(lambda x, y: x or y, newrow.values()):
+                        row[field].append((0, 0, newrow))
+                    i += max2
+                    nbrmax = max(nbrmax, i)
+
+            if len(prefix) == 0:
+                for i in range(max(nbrmax, 1)):
+                    datas.pop(0)
+            result = (row, nbrmax, warning, translate, data_id)
+            return result
+
+        fields_def = self.fields_get(cursor, user, context=context)
+        done = 0
+
+        while len(datas):
+            res = {}
+            warning = ''
+            try:
+                (res, other, warning, translate, data_id) = \
+                        process_liness(self, datas, [], fields_def)
+                if warning:
+                    cursor.rollback()
+                    return (-1, res, warning, '')
+                new_id = self.pool.get('ir.model.data')._update(cursor, user,
+                        self._name, current_module, res, xml_id=data_id,
+                        mode=mode, noupdate=noupdate)
+                for lang in translate:
+                    context2 = context.copy()
+                    context2['lang'] = lang
+                    self.write(cursor, user, [new_id], translate[lang],
+                            context=context2)
+            except Exception, exp:
+                logger.notify_channel("import", LOG_ERROR, exp)
+                cursor.rollback()
+                return (-1, res, exp[0], warning)
+            done += 1
+        return (done, 0, 0, 0)
+
+    def read(self, cursor, user, ids, fields_names=None, context=None,
+            load='_classic_read'):
+        if context is None:
+            context = {}
+        self.pool.get('ir.model.access').check(cursor, user, self._name, 'read')
+        if not fields_names:
+            fields_names = self._columns.keys() + self._inherit_fields.keys()
+        select = ids
+        if isinstance(ids, (int, long)):
+            select = [ids]
+        result = self._read_flat(cursor, user, select, fields_names, context,
+                load)
+        for i in result:
+            for key, j in i.items():
+                if j is None:
+                    i[key] = False
+        if isinstance(ids, (int, long)):
+            return result[0]
+        return result
+
+    def _read_flat(self, cursor, user, ids, fields_names, context=None,
+            load='_classic_read'):
+        if context is None:
+            context = {}
+        if not ids:
+            return []
+
+        if fields_names is None:
+            fields_names = self._columns.keys()
+
+        # construct a clause for the rules:
+        domain1, domain2 = self.pool.get('ir.rule').domain_get(cursor, user,
+                self._name)
+
+        # all inherited fields + all non inherited fields
+        # for which the attribute whose name is in load is True
+        fields_pre = [x for x in fields_names if x in self._columns \
+                and getattr(self._columns[x], '_classic_write')] + \
+                self._inherits.values()
+
+        if len(fields_pre):
+            fields_pre2 = map(lambda x: (x in ('create_date', 'write_date')) \
+                    and ('date_trunc(\'second\', ' + x + ') as ' + x) \
+                    or '"' + x + '"', fields_pre)
+            if domain1:
+                cursor.execute(('SELECT ' + ','.join(fields_pre2 + ['id']) + \
+                        ' FROM \"' + self._table +'\" ' \
+                        'WHERE id in (' + ','.join([str(x) for x in ids]) + ')'\
+                        ' AND ' + domain1 + ' order by ' + self._order),
+                        domain2)
+                if cursor.rowcount != len({}.fromkeys(ids)):
+                    raise ExceptORM('AccessError',
+                            'You are trying to bypass an access rule ' \
+                                    '(Document type: %s).' % self._description)
+            else:
+                cursor.execute('SELECT ' + ','.join(fields_pre2 + ['id']) + \
+                        ' FROM \"' + self._table + '\" ' \
+                        'WHERE id in (' + ','.join([str(x) for x in ids]) + ')'\
+                        ' ORDER BY ' + self._order)
+
+            res = cursor.dictfetchall()
+        else:
+            res = [{'id':x} for x in ids]
+
+        for field in fields_pre:
+            if self._columns[field].translate:
+                ids = [x['id'] for x in res]
+                res_trans = self.pool.get('ir.translation').get_ids(cursor,
+                        self._name + ',' + field, 'model',
+                        context.get('lang', 'en_US'), ids)
+                for i in res:
+                    i[field] = res_trans.get(i['id'], False) or i[field]
+
+        for table in self._inherits:
+            col = self._inherits[table]
+            cols = intersect(self._inherit_fields.keys(), fields_names)
+            if not cols:
+                continue
+            res2 = self.pool.get(table).read(cursor, user,
+                    [x[col] for x in res], cols, context, load)
+
+            res3 = {}
+            for i in res2:
+                res3[i['id']] = i
+                del i['id']
+
+            for record in res:
+                record.update(res3[record[col]])
+                if col not in fields_names:
+                    del record[col]
+
+        # all fields which need to be post-processed
+        # by a simple function (symbol_get)
+        fields_post = [x for x in fields_names if x in self._columns \
+                and self._columns[x]._symbol_get]
+        if fields_post:
+            # maybe it would be faster to iterate on fields_names and then
+            # on res, so that we would not need to fetch the _symbol_get
+            # on each occurrence
+            for i in res:
+                for field in fields_post:
+                    i[field] = self._columns[field]._symbol_get(i[field])
+        ids = [x['id'] for x in res]
+
+        # all non inherited fields for which the attribute
+        # whose name is in load is False
+        fields_post = [x for x in fields_names if x in self._columns \
+                and not getattr(self._columns[x], load)]
+        for field in fields_post:
+            # get the value of that field for all records/ids
+            res2 = self._columns[field].get(cursor, self, ids, field, user,
+                    context=context, values=res)
+            for record in res:
+                record[field] = res2[record['id']]
+        return res
+
+    def _validate(self, cursor, user, ids):
+        field_error = []
+        field_err_str = []
+        for field in self._constraints:
+            if not field[0](self, cursor, user, ids):
+                if len(field) > 2:
+                    field_error += field[2]
+                field_err_str.append(field[1])
+        if len(field_err_str):
+            raise ExceptORM('ValidateError',
+                    ('\n'.join(field_err_str), ','.join(field_error)))
+
+    def default_get(self, cursor, user, fields_names, context=None):
+        if context is None:
+            context = {}
+        value = {}
+        # get the default values for the inherited fields
+        for i in self._inherits.keys():
+            value.update(self.pool.get(i).default_get(cursor, user,
+                fields_names, context=context))
+
+        # get the default values defined in the object
+        for field in fields_names:
+            if field in self._defaults:
+                value[field] = self._defaults[field](self, cursor, user,
+                        context)
+
+        # get the default values set by the user and override the default
+        # values defined in the object
+        ir_values_obj = self.pool.get('ir.values')
+        res = ir_values_obj.get(cursor, user, 'default', False, [self._name])
+        for value_id, field, field_value in res:
+            if field in fields_names:
+                fld_def = (field in self._columns) and self._columns[field] \
+                        or self._inherit_fields[field][2]
+                if fld_def._type in ('many2one', 'one2one'):
+                    obj = self.pool.get(fld_def._obj)
+                    if not obj.search(cursor, user, [('id', '=', field_value)]):
+                        continue
+                if fld_def._type == 'many2many':
+                    obj = self.pool.get(fld_def._obj)
+                    field_value2 = []
+                    for i in range(len(field_value)):
+                        if not obj.search(cursor, user, [('id', '=',
+                            field_value[i])]):
+                            continue
+                        field_value2.append(field_value[i])
+                    field_value = field_value2
+                if fld_def._type == 'one2many':
+                    obj = self.pool.get(fld_def._obj)
+                    field_value2 = []
+                    for i in range(len(field_value)):
+                        field_value2.append({})
+                        for field2 in field_value[i]:
+                            if obj._columns[field2]._type \
+                                    in ('many2one', 'one2one'):
+                                obj2 = self.pool.get(obj._columns[field2]._obj)
+                                if not obj2.search(cursor, user,
+                                        [('id', '=', field_value[i][field2])]):
+                                    continue
+                            # TODO add test for many2many and one2many
+                            field_value2[i][field2] = field_value[i][field2]
+                    field_value = field_value2
+                value[field] = field_value
+        return value
+
+    def unlink(self, cursor, user, ids, context=None):
+        if context is None:
+            context = {}
+        if not ids:
+            return True
+        if isinstance(ids, (int, long)):
+            ids = [ids]
+        delta = context.get('read_delta', False)
+        if delta and self._log_access:
+            cursor.execute(
+                    "SELECT (now()  - min(write_date)) <= '%s'::interval " \
+                    "FROM \"%s\" WHERE id in (%s)" % \
+                    (delta, self._table, ",".join([str(x) for x in ids])))
+            res = cursor.fetchone()
+            if res and res[0]:
+                raise ExceptORM('ConcurrencyException',
+                        'This record was modified in the meanwhile')
+
+        self.pool.get('ir.model.access').check(cursor, user, self._name,
+                'unlink')
+
+        wf_service = LocalService("workflow")
+        for obj_id in ids:
+            wf_service.trg_delete(user, self._name, obj_id, cursor)
+        str_d = ','.join(('%d',) * len(ids))
+
+        #cursor.execute('select * from ' + self._table + \
+        #       ' where id in ('+str_d+')', ids)
+        #res = cursor.dictfetchall()
+        #for key in self._inherits:
+        #    ids2 = [x[self._inherits[key]] for x in res]
+        #    self.pool.get(key).unlink(cursor, user, ids2)
+
+        domain1, domain2 = self.pool.get('ir.rule').domain_get(cursor, user,
+                self._name)
+        if domain1:
+            domain1 = ' AND ' + domain1
+            cursor.execute('SELECT id FROM "'+self._table+'" ' \
+                    'WHERE id IN (' + str_d + ') ' + domain1, ids + domain2)
+            if cursor.rowcount != len({}.fromkeys(ids)):
+                raise ExceptORM('AccessError',
+                        'You are trying to bypass an access rule ' \
+                                '(Document type: %s).' % self._description)
+
+        cursor.execute('DELETE FROM inherit ' \
+                'WHERE (obj_type = %s AND obj_id IN ('+str_d+')) ' \
+                    'OR (inst_type = %s AND inst_id IN ('+str_d+'))',
+                    ((self._name,) + tuple(ids) + (self._name,) + tuple(ids)))
+        cursor.execute('DELETE FROM "'+self._table+'" ' \
+                'WHERE id IN (' + str_d + ') ' + domain1, ids + domain2)
+        return True
+
+    # TODO: Validate
+    def write(self, cursor, user, ids, vals, context=None):
+        if context is None:
+            context = {}
+        if not ids:
+            return True
+        if isinstance(ids, (int, long)):
+            ids = [ids]
+        delta = context.get('read_delta', False)
+        if delta and self._log_access:
+            cursor.execute("select (now() - min(write_date)) <= '%s'::interval"\
+                    " FROM %s WHERE id IN (%s)" % \
+                    (delta, self._table, ",".join([str(x) for x in ids])))
+            res = cursor.fetchone()
+            if res and res[0]:
+                for field in vals:
+                    if field in self._columns \
+                            and self._columns[field]._classic_write:
+                        raise ExceptORM('ConcurrencyException',
+                                'This record was modified in the meanwhile')
+
+        self.pool.get('ir.model.access').check(cursor, user, self._name,
+                'write')
+
+        #for v in self._inherits.values():
+        #    assert v not in vals, (v, vals)
+        ids_str = ','.join([str(x) for x in ids])
+        upd0 = []
+        upd1 = []
+        upd_todo = []
+        updend = []
+        direct = []
+        totranslate = context.get('lang', False) \
+                and (context['lang'] != 'en_US')
+        for field in vals:
+            if field in self._columns:
+                if self._columns[field]._classic_write:
+                    if (not totranslate) or not self._columns[field].translate:
+                        upd0.append('"' + field + '"=' + \
+                                self._columns[field]._symbol_set[0])
+                        upd1.append(self._columns[field]._symbol_set[1](
+                            vals[field]))
+                    direct.append(field)
+                else:
+                    upd_todo.append(field)
+            else:
+                updend.append(field)
+            if field in self._columns \
+                    and hasattr(self._columns[field], 'selection') \
+                    and vals[field]:
+                if self._columns[field]._type == 'reference':
+                    val = vals[field].split(',')[0]
+                else:
+                    val = vals[field]
+                if isinstance(self._columns[field].selection, (tuple, list)):
+                    if val not in dict(self._columns[field].selection):
+                        raise ExceptORM('ValidateError',
+                        'The value "%s" for the field "%s" ' \
+                                'is not in the selection' % \
+                                (vals[field], field))
+                else:
+                    if val not in dict(self._columns[field].selection(
+                        self, cursor, user, context=context)):
+                        raise ExceptORM('ValidateError',
+                        'The value "%s" for the field "%s" ' \
+                                'is not in the selection' % \
+                                (vals[field], field))
+
+        if self._log_access:
+            upd0.append('write_uid=%d')
+            upd0.append('write_date=now()')
+            upd1.append(user)
+
+        if len(upd0):
+            domain1, domain2 = self.pool.get('ir.rule').domain_get(cursor,
+                    user, self._name)
+            if domain1:
+                domain1 = ' AND ' + domain1
+                cursor.execute('SELECT id FROM "' + self._table + '" ' \
+                        'WHERE id IN (' + ids_str + ') ' + domain1, domain2)
+                if cursor.rowcount != len({}.fromkeys(ids)):
+                    raise ExceptORM('AccessError',
+                            'You are trying to bypass an access rule ' \
+                                    '(Document type: %s).' % self._description)
+            else:
+                cursor.execute('SELECT id FROM "' + self._table + '" ' \
+                        'WHERE id IN (' + ids_str + ')')
+                if cursor.rowcount != len({}.fromkeys(ids)):
+                    raise ExceptORM('AccessError',
+                            'You are trying to write on a record ' \
+                                'that doesn\'t exist (Document type: %s).' % \
+                                    self._description)
+            cursor.execute('UPDATE "' + self._table + '" ' \
+                    'SET ' + ','.join(upd0) + ' ' \
+                    'WHERE id IN (' + ids_str + ') ' + domain1, upd1 + domain2)
+
+            if totranslate:
+                for field in direct:
+                    if self._columns[field].translate:
+                        self.pool.get('ir.translation')._set_ids(cursor, user,
+                                self._name + ',' + field, 'model',
+                                context['lang'], ids, vals[field])
+
+        # call the 'set' method of fields which are not classic_write
+        upd_todo.sort(lambda x, y: self._columns[x].priority - \
+                self._columns[y].priority)
+        for field in upd_todo:
+            for select_id in ids:
+                self._columns[field].set(cursor, self, select_id, field,
+                        vals[field], user, context=context)
+
+        for table in self._inherits:
+            col = self._inherits[table]
+            cursor.execute('SELECT DISTINCT "' + col + '" ' \
+                    'FROM "' + self._table + '" WHERE id IN (' + ids_str + ')',
+                    upd1)
+            nids = [x[0] for x in cursor.fetchall()]
+
+            vals2 = {}
+            for val in updend:
+                if self._inherit_fields[val][0] == table:
+                    vals2[val] = vals[val]
+            self.pool.get(table).write(cursor, user, nids, vals2,
+                    context=context)
+
+        self._validate(cursor, user, ids)
+
+        if context.has_key('read_delta'):
+            del context['read_delta']
+
+        wf_service = LocalService("workflow")
+        for obj_id in ids:
+            wf_service.trg_write(user, self._name, obj_id, cursor)
+        self._update_function_stored(cursor, user, ids, context=context)
+        return True
+
+    def create(self, cursor, user, vals, context=None):
+        """
+        cursor = database cursor
+        user = user id
+        vals = dictionary of the form {'field_name': field_value, ...}
+        """
+        if context is None:
+            context = {}
+        self.pool.get('ir.model.access').check(cursor, user, self._name,
+                'create')
+
+        default = []
+        avoid_table = []
+        for (i, j) in self._inherits.items():
+            if j in vals:
+                avoid_table.append(i)
+        for i in self._columns.keys(): # + self._inherit_fields.keys():
+            if not i in vals:
+                default.append(i)
+        for i in self._inherit_fields.keys():
+            if (not i in vals) \
+                    and (not self._inherit_fields[i][0] in avoid_table):
+                default.append(i)
+
+        if len(default):
+            vals.update(self.default_get(cursor, user, default, context))
+
+        tocreate = {}
+        for i in self._inherits:
+            if self._inherits[i] not in vals:
+                tocreate[i] = {}
+
+        (upd0, upd1, upd2) = ('', '', [])
+        upd_todo = []
+
+        for i in vals.keys():
+            if i in self._inherit_fields:
+                (table, col, col_detail) = self._inherit_fields[i]
+                tocreate[table][i] = vals[i]
+                del vals[i]
+
+        cursor.execute("SELECT NEXTVAL('" + self._sequence + "')")
+        (id_new,) = cursor.fetchone()
+        for table in tocreate:
+            new_id = self.pool.get(table).create(cursor, user, tocreate[table])
+            upd0 += ',' + self._inherits[table]
+            upd1 += ',%d'
+            upd2.append(new_id)
+            cursor.execute('INSERT INTO inherit ' \
+                    '(obj_type, obj_id, inst_type, inst_id) ' \
+                    'values (%s, %d, %s, %d)',
+                    (table, new_id, self._name, id_new))
+
+        for field in vals:
+            if self._columns[field]._classic_write:
+                upd0 = upd0 + ',"' + field + '"'
+                upd1 = upd1 + ',' + self._columns[field]._symbol_set[0]
+                upd2.append(self._columns[field]._symbol_set[1](vals[field]))
+            else:
+                upd_todo.append(field)
+            if field in self._columns \
+                    and hasattr(self._columns[field], 'selection') \
+                    and vals[field]:
+                if self._columns[field]._type == 'reference':
+                    val = vals[field].split(',')[0]
+                else:
+                    val = vals[field]
+                if isinstance(self._columns[field].selection, (tuple, list)):
+                    if val not in dict(self._columns[field].selection):
+                        raise ExceptORM('ValidateError',
+                        'The value "%s" for the field "%s" ' \
+                                'is not in the selection' % \
+                                (vals[field], field))
+                else:
+                    if val not in dict(self._columns[field].selection(
+                        self, cursor, user, context=context)):
+                        raise ExceptORM('ValidateError',
+                        'The value "%s" for the field "%s" ' \
+                                'is not in the selection' % \
+                                (vals[field], field))
+        if self._log_access:
+            upd0 += ', create_uid, create_date'
+            upd1 += ', %d, now()'
+            upd2.append(user)
+        cursor.execute('INSERT INTO "' + self._table + '" ' \
+                '(id' + upd0 + ') ' \
+                'VALUES (' + str(id_new) + upd1 + ')', tuple(upd2))
+        upd_todo.sort(lambda x, y: self._columns[x].priority - \
+                self._columns[y].priority)
+        for field in upd_todo:
+            self._columns[field].set(cursor, self, id_new, field, vals[field],
+                    user, context)
+
+        self._validate(cursor, user, [id_new])
+
+        wf_service = LocalService("workflow")
+        wf_service.trg_create(user, self._name, id_new, cursor)
+        self._update_function_stored(cursor, user, [id_new], context=context)
+        return id_new
+
+    def _update_function_stored(self, cursor, user, ids, context=None):
+        if context is None:
+            context = {}
+        ffields = [x for x in self._columns if isinstance(self._columns[x],
+            fields.function) and self._columns[x].store]
+        if ffields:
+            result = self.read(cursor, user, ids, fields_names=ffields,
+                    context=context)
+            for res in result:
+                upd0 = []
+                upd1 = []
+                for field in res:
+                    if field not in ffields:
+                        continue
+                    value = res[field]
+                    if self._columns[field]._type in ('many2one', 'one2one'):
+                        value = res[field][0]
+                    upd0.append('"' + field + '"=' + \
+                            self._columns[field]._symbol_set[0])
+                    upd1.append(self._columns[field]._symbol_set[1](value))
+                upd1.append(res['id'])
+                cursor.execute('update "' + self._table + '" set ' + \
+                        ','.join(upd0) + ' where id = %d', upd1)
+        return True
+
+    def fields_get(self, cursor, user, fields_names=None, context=None):
+        """
+        Return the definition of each field of the object.
+        The optional fields_names parameter limits the result to those fields.
+        """
+        if context is None:
+            context = {}
+        res = {}
+        translation_obj = self.pool.get('ir.translation')
+        model_access_obj = self.pool.get('ir.model.access')
+        for parent in self._inherits:
+            res.update(self.pool.get(parent).fields_get(cursor, user,
+                fields_names, context))
+        write_access = model_access_obj.check(cursor, user, self._name,
+                'write', raise_exception=False)
+        for field in self._columns.keys():
+            res[field] = {'type': self._columns[field]._type}
+            for arg in (
+                    'string',
+                    'readonly',
+                    'states',
+                    'size',
+                    'required',
+                    'change_default',
+                    'translate',
+                    'help',
+                    'select',
+                    ):
+                if getattr(self._columns[field], arg):
+                    res[field][arg] = getattr(self._columns[field], arg)
+            if not write_access:
+                res[field]['readonly'] = True
+                res[field]['states'] = {}
+            for arg in ('digits', 'invisible'):
+                if hasattr(self._columns[field], arg) \
+                        and getattr(self._columns[field], arg):
+                    res[field][arg] = getattr(self._columns[field], arg)
+
+            # translate the field label
+            res_trans = translation_obj._get_source(cursor,
+                    self._name + ',' + field, 'field',
+                    context.get('lang', 'en_US'))
+            if res_trans:
+                res[field]['string'] = res_trans
+            help_trans = translation_obj._get_source(cursor,
+                    self._name + ',' + field, 'help',
+                    context.get('lang', 'en_US'))
+            if help_trans:
+                res[field]['help'] = help_trans
+
+            if hasattr(self._columns[field], 'selection'):
+                if isinstance(self._columns[field].selection, (tuple, list)):
+                    sel = self._columns[field].selection
+                    # translate each selection option
+                    sel2 = []
+                    for (key, val) in sel:
+                        val2 = translation_obj._get_source(cursor,
+                                self._name + ',' + field, 'selection',
+                                context.get('lang', 'en_US'), val)
+                        sel2.append((key, val2 or val))
+                    sel = sel2
+                    res[field]['selection'] = sel
+                else:
+                    # call the 'dynamic selection' function
+                    res[field]['selection'] = self._columns[field].selection(
+                            self, cursor, user, context)
+            if res[field]['type'] in (
+                    'one2many',
+                    'many2many',
+                    'many2one',
+                    'one2one',
+                    ):
+                res[field]['relation'] = self._columns[field]._obj
+                res[field]['domain'] = self._columns[field]._domain
+                res[field]['context'] = self._columns[field]._context
+
+        if fields_names:
+            # filter out fields which aren't in the fields_names list
+            for i in res.keys():
+                if i not in fields_names:
+                    del res[i]
+        return res
+
+    def view_header_get(self, cursor, user, view_id=None, view_type='form',
+            context=None):
+        """
+        Overload this method if you need a window title
+        which depends on the context
+        """
+        return False
+
+    def __view_look_dom(self, cursor, user, node, context=None):
+        if context is None:
+            context = {}
+        result = False
+        fields_attrs = {}
+        childs = True
+        if node.nodeType == node.ELEMENT_NODE and node.localName == 'field':
+            if node.hasAttribute('name'):
+                attrs = {}
+                try:
+                    if node.getAttribute('name') in self._columns:
+                        relation = self._columns[node.getAttribute('name')]._obj
+                    else:
+                        relation = self._inherit_fields[node.getAttribute(
+                            'name')][2]._obj
+                except (KeyError, AttributeError):
+                    relation = False
+                if relation:
+                    childs = False
+                    views = {}
+                    for field in node.childNodes:
+                        if field.nodeType == field.ELEMENT_NODE \
+                                and field.localName in ('form', 'tree'):
+                            node.removeChild(field)
+                            xarch, xfields = self.pool.get(relation
+                                    ).__view_look_dom_arch(cursor, user, field,
+                                            context)
+                            views[str(field.localName)] = {
+                                'arch': xarch,
+                                'fields': xfields
+                            }
+                    attrs = {'views': views}
+                fields_attrs[node.getAttribute('name')] = attrs
+
+        elif node.nodeType == node.ELEMENT_NODE \
+                and node.localName in ('form', 'tree'):
+            result = self.view_header_get(cursor, user, False, node.localName,
+                    context)
+            if result:
+                node.setAttribute('string', result)
+
+        if node.nodeType == node.ELEMENT_NODE:
+            # translate view
+            translation_obj = self.pool.get('ir.translation')
+            if ('lang' in context) and not result:
+                if node.hasAttribute('string') and node.getAttribute('string'):
+                    trans = translation_obj._get_source(cursor,
+                            self._name, 'view', context['lang'],
+                            node.getAttribute('string').encode('utf8'))
+                    if trans:
+                        node.setAttribute('string', trans.decode('utf8'))
+                if node.hasAttribute('sum') and node.getAttribute('sum'):
+                    trans = translation_obj._get_source(cursor,
+                            self._name, 'view', context['lang'],
+                            node.getAttribute('sum').encode('utf8'))
+                    if trans:
+                        node.setAttribute('sum', trans.decode('utf8'))
+            # add a view for the properties
+            if node.localName == 'properties':
+                parent = node.parentNode
+                doc = node.ownerDocument
+                models = ["'" + x + "'" for x in [self._name] + \
+                        self._inherits.keys()]
+                cursor.execute('SELECT name, group_name ' \
+                        'FROM ir_model_fields ' \
+                        'WHERE model in (' + ','.join(models) + ') ' \
+                            'AND view_load ORDER BY group_name, id')
+                oldgroup = None
+                for fname, gname in cursor.fetchall():
+                    if oldgroup != gname:
+                        child = doc.createElement('separator')
+                        child.setAttribute('string', gname)
+                        child.setAttribute('colspan', "4")
+                        oldgroup = gname
+                        parent.insertBefore(child, node)
+
+                    child = doc.createElement('field')
+                    child.setAttribute('name', fname)
+                    parent.insertBefore(child, node)
+                parent.removeChild(node)
+
+        if childs:
+            for field in node.childNodes:
+                fields_attrs.update(self.__view_look_dom(cursor, user, field,
+                    context))
+        return fields_attrs
+
+    def __view_look_dom_arch(self, cursor, user, node, context=None):
+        if context is None:
+            context = {}
+        fields_def = self.__view_look_dom(cursor, user, node, context=context)
+        arch = node.toxml(encoding="utf-8").replace('\t', '')
+        fields2 = self.fields_get(cursor, user, fields_def.keys(), context)
+        for field in fields_def:
+            fields2[field].update(fields_def[field])
+        return arch, fields2
+
+    def fields_view_get(self, cursor, user, view_id=None, view_type='form',
+            context=None, toolbar=False):
+        if context is None:
+            context = {}
+
+        def _inherit_apply(src, inherit):
+
+            def _find(node, node2):
+                if node.nodeType == node.ELEMENT_NODE \
+                        and node.localName == node2.localName:
+                    res = True
+                    for attr in node2.attributes.keys():
+                        if attr == 'position':
+                            continue
+                        if node.hasAttribute(attr):
+                            if node.getAttribute(attr) == \
+                                    node2.getAttribute(attr):
+                                continue
+                        res = False
+                    if res:
+                        return node
+                for child in node.childNodes:
+                    res = _find(child, node2)
+                    if res:
+                        return res
+                return None
+
+            doc_src = dom.minidom.parseString(src)
+            doc_dest = dom.minidom.parseString(inherit)
+            for node2 in doc_dest.childNodes:
+                if not node2.nodeType == node2.ELEMENT_NODE:
+                    continue
+                node = _find(doc_src, node2)
+                if node:
+                    pos = 'inside'
+                    if node2.hasAttribute('position'):
+                        pos = node2.getAttribute('position')
+                    if pos == 'replace':
+                        parent = node.parentNode
+                        for child in node2.childNodes:
+                            if child.nodeType == child.ELEMENT_NODE:
+                                parent.insertBefore(child, node)
+                        parent.removeChild(node)
+                    else:
+                        for child in node2.childNodes:
+                            if child.nodeType == child.ELEMENT_NODE:
+                                if pos == 'inside':
+                                    node.appendChild(child)
+                                elif pos == 'after':
+                                    sib = node.nextSibling
+                                    if sib:
+                                        node.parentNode.insertBefore(child, sib)
+                                    else:
+                                        node.parentNode.appendChild(child)
+                                elif pos == 'before':
+                                    node.parentNode.insertBefore(child, node)
+                                else:
+                                    raise AttributeError, \
+                                            'Unknown position ' \
+                                            'in inherited view %s!' % pos
+                else:
+                    attrs = ''.join([
+                        ' %s="%s"' % (attr, node2.getAttribute(attr))
+                        for attr in node2.attributes.keys()
+                        if attr != 'position'
+                    ])
+                    tag = "<%s%s>" % (node2.localName, attrs)
+                    raise AttributeError, \
+                            "Couldn't find tag '%s' in parent view !" % tag
+            return doc_src.toxml(encoding="utf-8").replace('\t', '')
+
+        result = {'type': view_type, 'model': self._name}
+
+        test = True
+        model = True
+        sql_res = False
+        while test:
+            if view_id:
+                where = (model and (" and model='%s'" % (self._name,))) or ''
+                cursor.execute('SELECT arch, name, field_parent, id, type, ' \
+                            'inherit_id ' \
+                        'FROM ir_ui_view WHERE id = %d ' + where, (view_id,))
+            else:
+                cursor.execute('SELECT arch, name, field_parent, id, type, ' \
+                        'inherit_id ' \
+                        'FROM ir_ui_view ' \
+                        'WHERE model = %s AND type = %s ORDER BY priority',
+                        (self._name, view_type))
+            sql_res = cursor.fetchone()
+            if not sql_res:
+                break
+            test = sql_res[5]
+            view_id = test or sql_res[3]
+            model = False
+
+        # if a view was found
+        if sql_res:
+            result['type'] = sql_res[4]
+            result['view_id'] = sql_res[3]
+            result['arch'] = sql_res[0]
+
+            def _inherit_apply_rec(result, inherit_id):
+                # get all views which inherit from (ie modify) this view
+                cursor.execute('SELECT arch, id FROM ir_ui_view ' \
+                        'WHERE inherit_id = %d AND model = %s ' \
+                        'ORDER BY priority', (inherit_id, self._name))
+                sql_inherit = cursor.fetchall()
+                for (inherit, view_id) in sql_inherit:
+                    result = _inherit_apply(result, inherit)
+                    result = _inherit_apply_rec(result, view_id)
+                return result
+
+            result['arch'] = _inherit_apply_rec(result['arch'], sql_res[3])
+
+            result['name'] = sql_res[1]
+            result['field_parent'] = sql_res[2] or False
+        # otherwise, build some kind of default view
+        else:
+            if view_type == 'form':
+                res = self.fields_get(cursor, user, context=context)
+                xml = '''<?xml version="1.0" encoding="utf-8"?>''' \
+                '''<form string="%s">''' % (self._description,)
+                for i in res:
+                    if res[i]['type'] not in ('one2many', 'many2many'):
+                        xml += '<field name="%s"/>' % (i,)
+                        if res[i]['type'] == 'text':
+                            xml += "<newline/>"
+                xml += "</form>"
+            elif view_type == 'tree':
+                xml = '''<?xml version="1.0" encoding="utf-8"?>''' \
+                '''<tree string="%s"><field name="%s"/></tree>''' \
+                % (self._description, self._rec_name)
+            elif view_type == 'calendar':
+                xml = '''<?xml version="1.0" encoding="utf-8"?>''' \
+                '''<calendar string="%s" date_start="%s">''' \
+                '''<field name="%s"/></calendar>''' \
+                % (self._description, self._date_name, self._rec_name)
+            else:
+                xml = ''
+            result['arch'] = xml
+            result['name'] = 'default'
+            result['field_parent'] = False
+            result['view_id'] = 0
+
+        doc = dom.minidom.parseString(result['arch'])
+        xarch, xfields = self.__view_look_dom_arch(cursor, user, doc,
+                context=context)
+        result['arch'] = xarch
+        result['fields'] = xfields
+        if toolbar:
+
+            def clean(i):
+                i = i[2]
+                for key in (
+                        'report_sxw_content',
+                        'report_rml_content',
+                        'report_sxw', 'report_rml',
+                        'report_sxw_content_data',
+                        'report_rml_content_data',
+                        ):
+                    if key in i:
+                        del i[key]
+                return i
+
+            ir_values_obj = self.pool.get('ir.values')
+            resprint = ir_values_obj.get(cursor, user, 'action',
+                    'client_print_multi', [(self._name, False)], False,
+                    context)
+            resaction = ir_values_obj.get(cursor, user, 'action',
+                    'client_action_multi', [(self._name, False)], False,
+                    context)
+            resrelate = ir_values_obj.get(cursor, user, 'action',
+                    'client_action_relate', [(self._name, False)], False,
+                    context)
+            resprint = [clean(x) for x in resprint]
+            resaction = [clean(x) for x in resaction]
+            resaction = [x for x in resaction if not x.get('multi', False)]
+            resprint = [x for x in resprint if not x.get('multi', False)]
+            resrelate = [x[2] for x in resrelate]
+            for i in resprint + resaction + resrelate:
+                i['string'] = i['name']
+            result['toolbar'] = {
+                'print': resprint,
+                'action': resaction,
+                'relate': resrelate,
+            }
+        return result
+
+    _view_look_dom_arch = __view_look_dom_arch
+
+    def _where_calc(self, cursor, user, args, active_test=True, context=None):
+        if context is None:
+            context = {}
+        args = args[:]
+        # if the object has a field named 'active', filter out all inactive
+        # records unless they were explicitly asked for
+        if 'active' in self._columns \
+                and (active_test and context.get('active_test', True)):
+            i = 0
+            active_found = False
+            while i < len(args):
+                if args[i][0] == 'active':
+                    active_found = True
+                i += 1
+            if not active_found:
+                args.append(('active', '=', 1))
+
+        i = 0
+        tables = ['"' + self._table + '"']
+        joins = []
+        while i < len(args):
+            table = self
+            if args[i][0] in self._inherit_fields:
+                table = self.pool.get(self._inherit_fields[args[i][0]][0])
+                if ('"' + table._table + '"' not in tables):
+                    tables.append('"' + table._table + '"')
+                    joins.append(('id', 'join', '%s.%s' % \
+                            (self._table, self._inherits[table._name]), table))
+            fargs = args[i][0].split('.', 1)
+            field = table._columns.get(fargs[0], False)
+            if not field:
+                i += 1
+                continue
+            if len(fargs) > 1:
+                if field._type == 'many2one':
+                    args[i] = (fargs[0], 'in',
+                            self.pool.get(field._obj).search(cursor, user,
+                                [(fargs[1], args[i][1], args[i][2])],
+                                context=context))
+                    i += 1
+                    continue
+                else:
+                    i += 1
+                    continue
+            if field._properties:
+                arg = [args.pop(i)]
+                j = i
+                while j < len(args):
+                    if args[j][0] == arg[0][0]:
+                        arg.append(args.pop(j))
+                    else:
+                        j += 1
+                if field._fnct_search:
+                    args.extend(field.search(cursor, user, self,
+                        arg[0][0], arg))
+            elif field._type == 'one2many':
+                field_obj = self.pool.get(field._obj)
+
+                if isinstance(args[i][2], basestring):
+                    # get the ids of the records of the remote resource
+                    ids2 = [x[0] for x in field_obj.name_search(cursor, user,
+                        args[i][2], [], args[i][1])]
+                else:
+                    ids2 = args[i][2]
+                if not ids2:
+                    args[i] = ('id', '=', '0')
+                else:
+                    cursor.execute('SELECT "' + field._fields_id + \
+                            '" FROM "' + field_obj._table + '" ' \
+                            'WHERE id IN (' + \
+                                ','.join([str(x) for x in ids2]) + ')')
+                    ids3 = [x[0] for x in cursor.fetchall()]
+
+                    args[i] = ('id', 'in', ids3)
+                i += 1
+            elif field._type == 'many2many':
+                #FIXME
+                if args[i][1] == 'child_of':
+                    if isinstance(args[i][2], basestring):
+                        ids2 = [x[0] for x in self.pool.get(
+                        field._obj).name_search(cursor, user, args[i][2], [],
+                            'like')]
+                    else:
+                        ids2 = args[i][2]
+
+                    def _rec_get(ids, table, parent):
+                        if not ids:
+                            return []
+                        ids2 = table.search(cursor, user,
+                                [(parent, 'in', ids)], context=context)
+                        return ids + _rec_get(ids2, table, parent)
+
+                    def _rec_convert(ids):
+                        if self.pool.get(field._obj) == self:
+                            return ids
+                        if not len(ids):
+                            return []
+                        cursor.execute('SELECT "' + field._id1 + '" ' \
+                                'FROM "' + field._rel + '" ' \
+                                'WHERE "' + field._id2 + '" IN (' + \
+                                    ','.join([str(x) for x in ids]) + ')')
+                        ids = [x[0] for x in cursor.fetchall()]
+                        return ids
+
+                    args[i] = ('id', 'in', _rec_convert(ids2 + _rec_get(ids2,
+                        self.pool.get(field._obj), table._parent_name)))
+                else:
+                    if isinstance(args[i][2], basestring):
+                        res_ids = [x[0] for x in self.pool.get(field._obj
+                            ).name_search(cursor, user, args[i][2], [],
+                                args[i][1])]
+                    else:
+                        res_ids = args[i][2]
+                    if not len(res_ids):
+                        args[i] = ('id', 'in', [0])
+                    else:
+                        cursor.execute('SELECT "' + field._id1 + '" ' \
+                                'FROM "' + field._rel + '" ' \
+                                'WHERE "' + field._id2 + '" IN (' + \
+                                    ','.join([str(x) for x in res_ids]) + ')')
+                        args[i] = ('id', 'in',
+                                [x[0] for x in cursor.fetchall()])
+                i += 1
+
+            elif field._type == 'many2one':
+                if args[i][1] == 'child_of':
+                    if isinstance(args[i][2], basestring):
+                        ids2 = [x[0] for x in self.pool.get(
+                            field._obj).name_search(cursor, user, args[i][2],
+                                [], 'like')]
+                    else:
+                        ids2 = args[i][2]
+
+                    def _rec_get(ids, table, parent):
+                        if not ids:
+                            return []
+                        ids2 = table.search(cursor, user,
+                                [(parent, 'in', ids)], context=context)
+                        return ids + _rec_get(ids2, table, parent)
+
+                    if field._obj != table._name:
+                        args[i] = (args[i][0], 'in', ids2 + _rec_get(ids2,
+                            self.pool.get(field._obj), table._parent_name),
+                            table)
+                    else:
+                        args[i] = ('id', 'in', ids2 + _rec_get(ids2, table,
+                            args[i][0]), table)
+                else:
+                    if isinstance(args[i][2], basestring):
+                        res_ids = self.pool.get(field._obj).name_search(cursor,
+                                user, args[i][2], [], args[i][1])
+                        args[i] = (args[i][0], 'in', [x[0] for x in res_ids],
+                                table)
+                    else:
+                        args[i] += (table,)
+                i += 1
+            else:
+                if field.translate:
+                    if args[i][1] in ('like', 'ilike'):
+                        args[i] = (args[i][0], args[i][1],
+                                '%%%s%%' % args[i][2])
+                    cursor.execute('SELECT res_id FROM ir_translation ' \
+                            'WHERE name = %s AND lang = %s ' \
+                                'AND type = %s ' \
+                                'AND value ' + args[i][1] + ' %s',
+                            (table._name + ',' + args[i][0],
+                                context.get('lang', 'en_US'), 'model',
+                                args[i][2]))
+                    ids = [x[0] for x in cursor.fetchall()]
+                    cursor.execute('SELECT id FROM "' + table._table + '" ' \
+                            'WHERE "' + args[i][0] + '" ' + args[i][1] + ' %s',
+                            (args[i][2],))
+                    ids += [x[0] for x in cursor.fetchall()]
+                    args[i] = ('id', 'in', ids, table)
+                else:
+                    args[i] += (table,)
+                i += 1
+        args.extend(joins)
+
+        qu1, qu2 = [], []
+        for arg in args:
+            table = self
+            if len(arg) > 3:
+                table = arg[3]
+            if arg[1] != 'in':
+                if (arg[2] is False) and (arg[1] == '='):
+                    qu1.append(arg[0] + ' is null')
+                elif (arg[2] is False) and (arg[1] == '<>' or arg[1] == '!='):
+                    qu1.append(arg[0] + ' is not null')
+                else:
+                    if arg[0] == 'id':
+                        if arg[1] == 'join':
+                            qu1.append('(%s.%s = %s)' % \
+                                    (table._table, arg[0], arg[2]))
+                        else:
+                            qu1.append('(%s.%s %s %%s)' % \
+                                    (table._table, arg[0], arg[1]))
+                            qu2.append(arg[2])
+                    else:
+                        add_null = False
+                        if arg[1] in ('like', 'ilike'):
+                            if isinstance(arg[2], str):
+                                str_utf8 = arg[2]
+                            elif isinstance(arg[2], unicode):
+                                str_utf8 = arg[2].encode('utf-8')
+                            else:
+                                str_utf8 = str(arg[2])
+                            qu2.append('%%%s%%' % str_utf8)
+                            if not str_utf8:
+                                add_null = True
+                        else:
+                            if arg[0] in table._columns:
+                                qu2.append(table._columns[arg[0]].\
+                                        _symbol_set[1](arg[2]))
+                        if arg[1] == '=like':
+                            arg1 = 'like'
+                        else:
+                            arg1 = arg[1]
+                        if arg[0] in table._columns:
+                            if arg[1] in ('like', 'ilike'):
+                                qu1.append('(%s.%s %s %s)' % (table._table,
+                                    arg[0], arg1, '%s'))
+                            else:
+                                qu1.append('(%s.%s %s %s)' % (table._table,
+                                    arg[0], arg1,
+                                    table._columns[arg[0]]._symbol_set[0]))
+                        else:
+                            qu1.append('(%s.%s %s \'%s\')' % \
+                                    (table._table, arg[0], arg1, arg[2]))
+
+                        if add_null:
+                            qu1[-1] = '(' + qu1[-1] + ' or ' + arg[0] + ' is null)'
+            elif arg[1] == 'in':
+                if len(arg[2]) > 0:
+                    todel = []
+                    for xitem in range(len(arg[2])):
+                        if arg[2][xitem] is False:
+                            todel.append(xitem)
+                    for xitem in todel[::-1]:
+                        del arg[2][xitem]
+                    if arg[0] == 'id':
+                        qu1.append('(%s.id in (%s))' % \
+                                (table._table,
+                                    ','.join(['%d'] * len(arg[2])),))
+                    else:
+                        qu1.append('(%s.%s in (%s))' % \
+                                (table._table, arg[0], ','.join(
+                                    [table._columns[arg[0]].\
+                                            _symbol_set[0]] * len(arg[2]))))
+                    if todel:
+                        qu1[-1] = '(' + qu1[-1] + ' or ' + arg[0] + ' is null)'
+                    qu2 += arg[2]
+                else:
+                    qu1.append(' false')
+        return (qu1, qu2, tables)
+
+    def search_count(self, cursor, user, args, context=None):
+        if context is None:
+            context = {}
+        res = self.search(cursor, user, args, context=context, count=True)
+        if isinstance(res, list):
+            return len(res)
+        return res
+
+    def search(self, cursor, user, args, offset=0, limit=None, order=None,
+            context=None, count=False):
+        if context is None:
+            context = {}
+        # compute the where, order by, limit and offset clauses
+        (qu1, qu2, tables) = self._where_calc(cursor, user, args,
+                context=context)
+
+        if len(qu1):
+            qu1 = ' WHERE ' + ' AND '.join(qu1)
+        else:
+            qu1 = ''
+        order_by = order or self._order
+
+        limit_str = limit and ' LIMIT %d' % limit or ''
+        offset_str = offset and ' OFFSET %d' % offset or ''
+
+        # construct a clause for the rules:
+        domain1, domain2 = self.pool.get('ir.rule').domain_get(cursor, user,
+                self._name)
+        if domain1:
+            qu1 = qu1 and qu1 + ' AND ' + domain1 or ' WHERE ' + domain1
+            qu2 += domain2
+
+        if count:
+            cursor.execute('SELECT COUNT(%s.id) FROM ' % self._table +
+                    ','.join(tables) + qu1 + limit_str + offset_str, qu2)
+            res = cursor.fetchall()
+            return res[0][0]
+        # execute the "main" query to fetch the ids we were searching for
+        cursor.execute('SELECT %s.id FROM ' % self._table +
+                ','.join(tables) + qu1 + ' order by ' + order_by + limit_str +
+                offset_str, qu2)
+        res = cursor.fetchall()
+        return [x[0] for x in res]
+
+    def name_get(self, cursor, user, ids, context=None):
+        if context is None:
+            context = {}
+        if not ids:
+            return []
+        if isinstance(ids, (int, long)):
+            ids = [ids]
+        return [(r['id'], str(r[self._rec_name])) for r in self.read(cursor,
+            user, ids, [self._rec_name], context, load='_classic_write')]
+
+    def name_search(self, cursor, user, name='', args=None, operator='ilike',
+            context=None, limit=80):
+        if args is None:
+            args = []
+        if context is None:
+            context = {}
+        args = args[:]
+        if name:
+            args += [(self._rec_name, operator, name)]
+        ids = self.search(cursor, user, args, limit=limit, context=context)
+        res = self.name_get(cursor, user, ids, context)
+        return res
+
+    def copy(self, cursor, user, object_id, default=None, context=None):
+        if context is None:
+            context = {}
+        if default is None:
+            default = {}
+        if 'state' not in default:
+            if 'state' in self._defaults:
+                default['state'] = self._defaults['state'](self, cursor, user,
+                        context)
+        data = self.read(cursor, user, object_id, context=context)
+        fields2 = self.fields_get(cursor, user)
+        for field in fields2:
+            ftype = fields2[field]['type']
+
+            if self._log_access \
+                     and (field in (
+                         'create_date',
+                         'create_uid',
+                         'write_date',
+                         'write_uid',
+                         )):
+                del data[field]
+
+            if field in default:
+                data[field] = default[field]
+            elif ftype == 'function':
+                del data[field]
+            elif ftype == 'many2one':
+                try:
+                    data[field] = data[field] and data[field][0]
+                except (KeyError, TypeError):
+                    pass
+            elif ftype in ('one2many', 'one2one'):
+                res = []
+                rel = self.pool.get(fields2[field]['relation'])
+                for rel_id in data[field]:
+                    # the lines are first duplicated using the wrong (old) 
+                    # parent but then are reassigned to the correct one thanks
+                    # to the (4, ...)
+                    res.append((4, rel.copy(cursor, user, rel_id,
+                        context=context)))
+                data[field] = res
+            elif ftype == 'many2many':
+                data[field] = [(6, 0, data[field])]
+        del data['id']
+        for i in self._inherits:
+            del data[self._inherits[i]]
+        return self.create(cursor, user, data, context=context)
+
+    def read_string(self, cursor, user, object_id, langs, fields_names=None,
+            context=None):
+        if context is None:
+            context = {}
+        res = {}
+        res2 = {}
+        self.pool.get('ir.model.access').check(cursor, user, 'ir.translation',
+                'read')
+        if fields_names is None:
+            fields_names = self._columns.keys() + self._inherit_fields.keys()
+        for lang in langs:
+            res[lang] = {'code': lang}
+            for field in fields_names:
+                if field in self._columns:
+                    res_trans = self.pool.get('ir.translation').\
+                            _get_source(cursor, self._name + ',' + field,
+                                    'field', lang)
+                    if res_trans:
+                        res[lang][field] = res_trans
+                    else:
+                        res[lang][field] = self._columns[field].string
+        for table in self._inherits:
+            cols = intersect(self._inherit_fields.keys(), fields_names)
+            res2 = self.pool.get(table).read_string(cursor, user, object_id,
+                    langs, cols, context)
+            # merge each parent's translations into res; the lang entries
+            # created above must not be overwritten
+            for lang in res2:
+                if lang not in res:
+                    res[lang] = {'code': lang}
+                for field in res2[lang]:
+                    res[lang][field] = res2[lang][field]
+        return res
+
+    def write_string(self, cursor, user, object_id, langs, vals, context=None):
+        if context is None:
+            context = {}
+        self.pool.get('ir.model.access').check(cursor, user, 'ir.translation',
+                'write')
+        for lang in langs:
+            for field in vals:
+                if field in self._columns:
+                    self.pool.get('ir.translation')._set_ids(cursor, user,
+                            self._name + ',' + field, 'field', lang, [0],
+                            vals[field])
+        for table in self._inherits:
+            cols = intersect(self._inherit_fields.keys(), vals)
+            if cols:
+                self.pool.get(table).write_string(cursor, user, object_id,
+                        langs, vals, context)
+        return True
+
+    def check_recursion(self, cursor, user, ids, parent=None):
+        if parent is None:
+            parent = self._parent_name
+        ids_parent = ids[:]
+        while len(ids_parent):
+            cursor.execute('SELECT distinct "' + parent + '" ' +
+                'FROM "' + self._table + '" ' +
+                'WHERE id IN (' + ','.join([str(x) for x in ids_parent]) + ')')
+            # filter out NULL parents: they end a branch and would break
+            # the IN clause of the next iteration
+            ids_parent = [x[0] for x in cursor.fetchall() if x[0]]
+            for i in ids_parent:
+                if i in ids:
+                    return False
+        return True
+
+orm = ORM
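A note on the domain convention used by search/_where_calc above: each
(field, operator, value) triple becomes a SQL fragment collected in qu1
plus a parameter collected in qu2. The sketch below is a simplified
illustration of that translation only; the table name and domain are
invented, and the real method additionally handles _inherits joins,
child_of, translated fields and the implicit active filter.

    # minimal sketch of the qu1/qu2 convention, not part of the commit
    domain = [('name', 'ilike', 'john'), ('active', '=', 1)]
    qu1, qu2 = [], []
    for field, operator, value in domain:
        qu1.append('(res_partner.%s %s %%s)' % (field, operator))
        if operator in ('like', 'ilike'):
            value = '%%%s%%' % value    # wrap for substring matching
        qu2.append(value)
    print 'WHERE ' + ' AND '.join(qu1)
    # WHERE (res_partner.name ilike %s) AND (res_partner.active = %s)
    print qu2
    # ['%john%', 1]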
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/osv/osv.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,226 @@
+"Objects Services"
+
+from orm import orm, except_orm
+from trytond.netsvc import Service, LocalService, Logger, LOG_ERROR
+from trytond import pooler
+import copy
+import sys
+from psycopg import IntegrityError
+from trytond.tools import UpdateableDict
+
+MODULE_LIST = []
+MODULE_CLASS_LIST = {}
+CLASS_POOL = {}
+
+class ExceptOSV(Exception):
+
+    def __init__(self, name, value, exc_type='warning'):
+        Exception.__init__(self)
+        self.name = name
+        self.exc_type = exc_type
+        self.value = value
+        self.args = (exc_type, name)
+
+except_osv = ExceptOSV
+
+class OSVService(Service):
+
+    def __init__(self):
+        self.object_name_pool = {}
+        self.module_obj_list = {}
+        self.created = []
+        self._sql_error = {}
+        Service.__init__(self, 'obj_proxy')
+        Service.join_group(self, 'web-services')
+        Service.export_method(self, self.object_name_list)
+        Service.export_method(self, self.exec_workflow)
+        Service.export_method(self, self.execute)
+        Service.export_method(self, self.execute_cr)
+
+    def execute_cr(self, cursor, user, object_name, method, *args, **kargs):
+        # TODO: check security level
+        try:
+            obj = pooler.get_pool(cursor.dbname).get(object_name)
+            if not obj:
+                self.abort_response('Object Error', 'warning',
+                'Object %s doesn\'t exist' % str(object_name))
+            if method not in getattr(obj, '_protected') and len(args) \
+                    and args[0] and len(obj._inherits):
+                types = {object_name: args[0]}
+                cursor.execute('SELECT inst_type, inst_id, object_name_id ' \
+                        'FROM inherit ' \
+                        'WHERE object_name_type = %s '\
+                            'AND  object_name_id in (' + \
+                            ','.join([str(x) for x in args[0]]) + ')',
+                            (object_name,))
+                for inst_type, inst_id, object_name_id in cursor.fetchall():
+                    if not inst_type in types:
+                        types[inst_type] = []
+                    types[inst_type].append(inst_id)
+                    types[object_name].remove(object_name_id)
+                for i, ids in types.items():
+                    if len(ids):
+                        obj_t = pooler.get_pool(cursor.dbname).get(i)
+                        res = getattr(obj_t, method)(cursor, user, ids,
+                                *args[1:], **kargs)
+            else:
+                res = getattr(obj, method)(cursor, user, *args, **kargs)
+            return res
+        except except_orm, inst:
+            self.abort_response(inst.name, 'warning', inst.value)
+        except ExceptOSV, inst:
+            self.abort_response(inst.name, inst.exc_type, inst.value)
+        except IntegrityError, inst:
+            for key in self._sql_error.keys():
+                if key in inst[0]:
+                    self.abort_response('Constraint Error', 'warning',
+                            self._sql_error[key])
+            self.abort_response('Integrity Error', 'warning', inst[0])
+        except:
+            import traceback
+            tb_s = ''.join(traceback.format_exception(
+                sys.exc_type, sys.exc_value, sys.exc_traceback))
+            logger = Logger()
+            logger.notify_channel("web-services", LOG_ERROR,
+                    'Exception in call: ' + tb_s)
+            raise
+
+    def execute(self, dbname, user, object_name, method, *args, **kargs):
+        database, pool = pooler.get_db_and_pool(dbname)
+        cursor = database.cursor()
+        # TODO: retry when a concurrent update raises an exception
+        try:
+            try:
+                res = pool.execute_cr(cursor, user, object_name, method,
+                        *args, **kargs)
+                cursor.commit()
+            except Exception:
+                cursor.rollback()
+                raise
+        finally:
+            cursor.close()
+        return res
+
+    def exec_workflow_cr(self, cursor, user, object_name, method, *args):
+        wf_service = LocalService("workflow")
+        wf_service.trg_validate(user, object_name, args[0], method, cursor)
+        return True
+
+    def exec_workflow(self, dbname, user, object_name, method, *args):
+        cursor = pooler.get_db(dbname).cursor()
+        # TODO: retry when a concurrent update raises an exception
+        try:
+            try:
+                res = self.exec_workflow_cr(cursor, user, object_name, method,
+                        *args)
+                cursor.commit()
+            except Exception:
+                cursor.rollback()
+                raise
+        finally:
+            cursor.close()
+        return res
+
+    def object_name_list(self):
+        return self.object_name_pool.keys()
+
+    def add(self, name, object_name_inst):
+        """
+        Add a new object instance to the object pool.
+        If it already existed, the instance is replaced.
+        """
+        if self.object_name_pool.has_key(name):
+            del self.object_name_pool[name]
+        self.object_name_pool[name] = object_name_inst
+
+        # guess the module name by slicing the string representation of
+        # the instance's class
+        module = str(object_name_inst.__class__)[6:]
+        module = module[:len(module)-1]
+        module = module.split('.')[0][2:]
+        self.module_obj_list.setdefault(module, []).append(object_name_inst)
+
+    def get(self, name):
+        return self.object_name_pool.get(name, None)
+
+    def instanciate(self, module):
+        res = []
+        class_list = MODULE_CLASS_LIST.get(module, [])
+        for klass in class_list:
+            res.append(klass.create_instance(self, module))
+        return res
+
+osv_pool = OSVService
+
+
+class OSV(orm):
+
+    def __new__(cls):
+        # instantiating an OSV subclass returns None on purpose: __new__
+        # only registers the class for a later create_instance() call
+        for module in cls.__module__.split('.'):
+            if module != 'trytond' and module != 'addons':
+                break
+        if not hasattr(cls, '_module'):
+            cls._module = module
+        MODULE_CLASS_LIST.setdefault(cls._module, []).append(cls)
+        CLASS_POOL[cls._name] = cls
+        if cls._module not in MODULE_LIST:
+            MODULE_LIST.append(cls._module)
+        return None
+
+    def create_instance(cls, pool, module):
+        """
+        Try to apply inheritance at the instantiation level and
+        put the objects in the pool.
+        """
+        parent_name = hasattr(cls, '_inherit') and cls._inherit
+        if parent_name:
+            parent_class = pool.get(parent_name).__class__
+            assert parent_class, "parent class %s does not exist !" % \
+                    parent_name
+            nattr = {}
+            for i in (
+                    '_columns',
+                    '_defaults',
+                    '_inherits',
+                    '_constraints',
+                    '_sql_constraints',
+                    ):
+                new = copy.copy(getattr(pool.get(parent_name), i))
+                if hasattr(new, 'update'):
+                    new.update(cls.__dict__.get(i, {}))
+                else:
+                    new.extend(cls.__dict__.get(i, []))
+                nattr[i] = new
+            name = hasattr(cls,'_name') and cls._name or cls._inherit
+            cls = type(name, (cls, parent_class), nattr)
+
+        obj = object.__new__(cls)
+        obj.__init__(pool)
+        return obj
+
+    create_instance = classmethod(create_instance)
+
+    def __init__(self, pool):
+        pool.add(self._name, self)
+        self.pool = pool
+        orm.__init__(self)
+
+osv = OSV
+
+
+class Cacheable(object):
+
+    _cache = UpdateableDict()
+
+    def add(self, key, value):
+        self._cache[key] = value
+
+    def invalidate(self, key):
+        del self._cache[key]
+
+    def get(self, key):
+        try:
+            return self._cache[key]
+        except KeyError:
+            return None
+
+    def clear(self):
+        self._cache.clear()
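Because OSV.__new__ returns None, "instantiating" a subclass is purely
declarative: the class is queued in MODULE_CLASS_LIST until the pool
calls create_instance. A hypothetical module written against this API
could look as follows; the model name and column are made up, and the
import paths assume the package layout of this commit.

    from trytond.osv.osv import OSV
    from trytond.osv import fields

    class Partner(OSV):
        _name = 'res.partner'
        _description = 'Partner'
        _columns = {
            'name': fields.char('Name', size=64, required=True),
        }

    # returns None: this only registers the class; the pool later builds
    # the real object with Partner.create_instance(pool, module)
    Partner()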
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/pooler.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,56 @@
+from sql_db import db_connect
+from netsvc import Logger, LOG_INFO
+
+_DB = {}
+_POOL = {}
+
+def get_db_and_pool(db_name, force_demo=False, update_module=False):
+    if db_name in _DB:
+        database = _DB[db_name]
+    else:
+        logger = Logger()
+        logger.notify_channel('pooler', LOG_INFO,
+                'Connecting to %s' % (db_name))
+        database = db_connect(db_name)
+        _DB[db_name] = database
+
+    if db_name in _POOL:
+        pool = _POOL[db_name]
+    else:
+        from osv.osv import OSVService
+        pool = OSVService()
+        _POOL[db_name] = pool
+        from module import load_modules
+        load_modules(database, force_demo, update_module)
+
+        if not update_module:
+            import report
+            #report.interface.register_all(database)
+            pool.get('ir.cron').pool_jobs(database.dbname)
+    return database, pool
+
+def restart_pool(db_name, force_demo=False, update_module=False):
+    _POOL.pop(db_name, None)
+    return get_db_and_pool(db_name, force_demo, update_module=update_module)
+
+def close_db(db_name):
+    if db_name in _DB:
+        _DB[db_name].truedb.close()
+        del _DB[db_name]
+    if db_name in _POOL:
+        del _POOL[db_name]
+
+def get_db_only(db_name):
+    if db_name in _DB:
+        database = _DB[db_name]
+    else:
+        database = db_connect(db_name)
+        _DB[db_name] = database
+    return database
+
+def get_db(db_name):
+    return get_db_and_pool(db_name)[0]
+
+def get_pool(db_name, force_demo=False, update_module=False):
+    pool = get_db_and_pool(db_name, force_demo, update_module)[1]
+    return pool
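get_db_and_pool memoizes one database wrapper and one OSVService per
database name in _DB/_POOL, so every caller shares the same pool. A
usage sketch follows; the database name and the admin user id 1 are
assumptions made for the example.

    from trytond import pooler

    database, pool = pooler.get_db_and_pool('tryton_demo')  # connects once
    assert pooler.get_pool('tryton_demo') is pool           # served from _POOL

    cursor = database.cursor()
    try:
        module_obj = pool.get('ir.module.module')
        print module_obj.search(cursor, 1, [('state', '=', 'installed')])
    finally:
        cursor.close()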
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/security.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,37 @@
+import pooler
+from config import CONFIG
+
+_USER_CACHE = {}
+
+def login(dbname, loginname, password):
+    cursor = pooler.get_db(dbname).cursor()
+    cursor.execute('SELECT id FROM res_users '
+        'WHERE login = %s and password = %s and active',
+        (loginname.encode('utf-8'), password.encode('utf-8')))
+    res = cursor.fetchone()
+    cursor.close()
+    if res:
+        return res[0]
+    else:
+        return False
+
+def check_super(passwd):
+    if passwd == CONFIG['admin_passwd']:
+        return True
+    else:
+        raise Exception('AccessDenied')
+
+def check(dbname, user, passwd):
+    # FIXME: this should be db dependent
+    if _USER_CACHE.has_key(user) and (_USER_CACHE[user]==passwd):
+        return True
+    cursor = pooler.get_db(dbname).cursor()
+    cursor.execute('SELECT count(*) FROM res_users ' \
+            'WHERE id = %d AND password = %s', (int(user), passwd))
+    res = cursor.fetchone()[0]
+    cursor.close()
+    if not res:
+        raise Exception('AccessDenied')
+    _USER_CACHE[user] = passwd
+    return True
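login and check both compare plaintext passwords against res_users, and
check keeps a process-wide _USER_CACHE keyed only by user id (the FIXME
above notes it should be per-database). A sketch of the expected call
sequence, with made-up credentials:

    import security

    # login() returns the user id, or False on bad credentials
    user_id = security.login('tryton_demo', u'admin', u'admin')
    if user_id:
        # the first call hits res_users; later calls are answered from
        # _USER_CACHE (a mismatch raises AccessDenied)
        security.check('tryton_demo', user_id, 'admin')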
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/sql_db.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,187 @@
+import psycopg
+import re
+import os
+from mx import DateTime as mdt
+import zipfile
+import version
+from config import CONFIG
+
+RE_FROM = re.compile('.* from "?([a-zA-Z_0-9]+)"? .*$')
+RE_INTO = re.compile('.* into "?([a-zA-Z_0-9]+)"? .*$')
+
+class FakeCursor:
+    nbr = 0
+    _tables = {}
+    sql_from_log = {}
+    sql_into_log = {}
+    sql_log = False
+    count = 0
+
+    def __init__(self, database, con, dbname):
+        self.db = database
+        self.obj = database.cursor()
+        self.con = con
+        self.dbname = dbname
+
+    def execute(self, sql, params=None):
+        if not params:
+            params = ()
+
+        def base_string(string):
+            if isinstance(string, unicode):
+                return string.encode('utf-8')
+            return string
+
+        para = [base_string(string) for string in params]
+        if isinstance(sql, unicode):
+            sql = sql.encode('utf-8')
+        if self.sql_log:
+            now = mdt.now()
+        if para:
+            res = self.obj.execute(sql, para)
+        else:
+            res = self.obj.execute(sql)
+        if self.sql_log:
+            self.count += 1
+            res_from = RE_FROM.match(sql.lower())
+            if res_from:
+                self.sql_from_log.setdefault(res_from.group(1), [0, 0])
+                self.sql_from_log[res_from.group(1)][0] += 1
+                self.sql_from_log[res_from.group(1)][1] += mdt.now() - now
+            res_into = RE_INTO.match(sql.lower())
+            if res_into:
+                self.sql_into_log.setdefault(res_into.group(1), [0, 0])
+                self.sql_into_log[res_into.group(1)][0] += 1
+                self.sql_into_log[res_into.group(1)][1] += mdt.now() - now
+        return res
+
+    def print_log(self, sql_type='from'):
+        print "SQL LOG %s:" % (sql_type,)
+        if sql_type == 'from':
+            logs = self.sql_from_log.items()
+        else:
+            logs = self.sql_into_log.items()
+        logs.sort(lambda x, y: cmp(x[1][1], y[1][1]))
+        amount = 0
+        for log in logs:
+            print "table:", log[0], ":", str(log[1][1]), "/", log[1][0]
+            amount += log[1][1]
+        print "SUM:%s/%d" % (amount, self.count)
+
+    def close(self):
+        if self.sql_log:
+            self.print_log('from')
+            self.print_log('into')
+        self.obj.close()
+
+        # This forces the cursor to be freed, and thus available again. It is
+        # important because otherwise we can overload the server very easily
+        # through a cursor shortage (cursors are not garbage collected as
+        # fast as they should be). The problem is probably due in part to
+        # browse records keeping a reference to the cursor.
+        del self.obj
+
+    def __getattr__(self, name):
+        return getattr(self.obj, name)
+
+class FakeDB:
+
+    def __init__(self, truedb, dbname):
+        self.truedb = truedb
+        self.dbname = dbname
+
+    def cursor(self):
+        return FakeCursor(self.truedb, {}, self.dbname)
+
+def db_connect(db_name, serialize=0):
+    host = CONFIG['db_host'] and "host=%s" % CONFIG['db_host'] or ''
+    port = CONFIG['db_port'] and "port=%s" % CONFIG['db_port'] or ''
+    name = "dbname=%s" % db_name
+    user = CONFIG['db_user'] and "user=%s" % CONFIG['db_user'] or ''
+    password = CONFIG['db_password'] \
+            and "password=%s" % CONFIG['db_password'] or ''
+    maxconn = int(CONFIG['db_maxconn']) or 64
+    tdb = psycopg.connect('%s %s %s %s %s' % (host, port, name, user, password),
+            serialize=serialize, maxconn=maxconn)
+    fdb = FakeDB(tdb, db_name)
+    return fdb
+
+def init_db(cursor):
+    sql_file = os.path.join(os.path.dirname(__file__), 'init.sql')
+    for line in file(sql_file).read().split(';'):
+        if (len(line)>0) and (not line.isspace()):
+            cursor.execute(line)
+
+    opj = os.path.join
+    addons_path = os.path.join(os.path.dirname(__file__), 'addons')
+
+    for i in (os.listdir(addons_path) + ['ir', 'res']):
+        terp_file = opj(addons_path, i, '__terp__.py')
+        mod_path = opj(addons_path, i)
+        if i in ('ir', 'res'):
+            root_path = os.path.dirname(__file__)
+            terp_file = opj(root_path, i, '__terp__.py')
+            mod_path = opj(root_path, i)
+        info = {}
+        if os.path.isfile(terp_file) \
+                and not os.path.isfile(opj(addons_path, i + '.zip')):
+            info = eval(file(terp_file).read())
+        elif zipfile.is_zipfile(mod_path):
+            zfile = zipfile.ZipFile(mod_path)
+            i = os.path.splitext(i)[0]
+            info = eval(zfile.read(opj(i, '__terp__.py')))
+        if info:
+            categs = info.get('category', 'Uncategorized').split('/')
+            p_id = None
+            while categs:
+                if p_id is not None:
+                    cursor.execute('SELECT id ' \
+                            'FROM ir_module_category ' \
+                            'WHERE name = %s AND parent_id = %d',
+                            (categs[0], p_id))
+                else:
+                    cursor.execute('SELECT id ' \
+                            'FROM ir_module_category ' \
+                            'WHERE name = %s AND parent_id is NULL',
+                            (categs[0],))
+                c_id = cursor.fetchone()
+                if not c_id:
+                    cursor.execute(
+                            'SELECT NEXTVAL(\'ir_module_category_id_seq\')')
+                    c_id = cursor.fetchone()[0]
+                    cursor.execute('INSERT INTO ir_module_category ' \
+                            '(id, name, parent_id) ' \
+                            'VALUES (%d, %s, %d)', (c_id, categs[0], p_id))
+                else:
+                    c_id = c_id[0]
+                p_id = c_id
+                categs = categs[1:]
+
+            active = info.get('active', False)
+            installable = info.get('installable', True)
+            if installable:
+                if active:
+                    state = 'to install'
+                else:
+                    state = 'uninstalled'
+            else:
+                state = 'uninstallable'
+            cursor.execute('SELECT NEXTVAL(\'ir_module_module_id_seq\')')
+            module_id = cursor.fetchone()[0]
+            cursor.execute('INSERT INTO ir_module_module ' \
+                    '(id, author, latest_version, website, name, shortdesc, ' \
+                    'description, category_id, state) ' \
+                    'VALUES (%d, %s, %s, %s, %s, %s, %s, %d, %s)',
+                    (module_id, info.get('author', ''),
+                        version.VERSION.rsplit('.', 1)[0] + '.' +
+                            info.get('version', ''),
+                        info.get('website', ''), i, info.get('name', False),
+                        info.get('description', ''), p_id, state))
+            dependencies = info.get('depends', [])
+            for dependency in dependencies:
+                cursor.execute('INSERT INTO ir_module_module_dependency ' \
+                        '(module_id, name) VALUES (%s, %s)',
+                        (module_id, dependency))
+
+psycopg.register_type(psycopg.new_type((1082,), "date", lambda x:x))
+psycopg.register_type(psycopg.new_type((1083,), "time", lambda x:x))
+psycopg.register_type(psycopg.new_type((1114,), "datetime", lambda x:x))
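FakeCursor stores its statistics (sql_from_log, sql_into_log, count) in
class attributes, so they aggregate across every cursor of the process,
while setting sql_log on an instance enables timing for that cursor
only. A small sketch (the database name is an assumption):

    import sql_db

    database = sql_db.db_connect('tryton_demo')
    cursor = database.cursor()
    cursor.sql_log = True    # instance attribute shadows the class default
    cursor.execute('SELECT id FROM ir_module_module')
    # close() prints the per-table "from"/"into" summaries collected above
    cursor.close()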
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/tiny_socket.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,71 @@
+import socket
+import cPickle
+
+
+class MyException(Exception):
+
+    def __init__(self, faultcode, faultstring):
+        Exception.__init__(self)
+        self.faultcode = faultcode
+        self.faultstring = faultstring
+        self.args = (faultcode, faultstring)
+
+
+class MySocket:
+
+    def __init__(self, sock=None):
+        if sock is None:
+            self.sock = socket.socket(
+                    socket.AF_INET, socket.SOCK_STREAM)
+        else:
+            self.sock = sock
+        self.sock.settimeout(120)
+
+    def connect(self, host, port=False):
+        if not port:
+            protocol, buf = host.split('//')
+            host, port = buf.split(':')
+        self.sock.connect((host, int(port)))
+
+    def disconnect(self):
+        self.sock.shutdown(socket.SHUT_RDWR)
+        self.sock.close()
+
+    def mysend(self, msg, exception=False, traceback=None):
+        msg = cPickle.dumps([msg, traceback])
+        size = len(msg)
+        self.sock.send('%8d' % size)
+        self.sock.send(exception and "1" or "0")
+        totalsent = 0
+        while totalsent < size:
+            sent = self.sock.send(msg[totalsent:])
+            if sent == 0:
+                raise RuntimeError, "socket connection broken"
+            totalsent = totalsent + sent
+
+    def myreceive(self):
+        buf = ''
+        while len(buf) < 8:
+            chunk = self.sock.recv(8 - len(buf))
+            if chunk == '':
+                raise RuntimeError, "socket connection broken"
+            buf += chunk
+        size = int(buf)
+        buf = self.sock.recv(1)
+        if buf != "0":
+            exception = buf
+        else:
+            exception = False
+        msg = ''
+        while len(msg) < size:
+            chunk = self.sock.recv(size-len(msg))
+            if chunk == '':
+                raise RuntimeError, "socket connection broken"
+            msg = msg + chunk
+        res = cPickle.loads(msg)
+        if isinstance(res[0], Exception):
+            if exception:
+                raise MyException(str(res[0]), str(res[1]))
+            raise res[0]
+        else:
+            return res[0]
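The framing implemented by mysend/myreceive is: an 8-character ASCII
size field, one byte that is "1" when the payload carries an exception,
then the cPickle dump of [msg, traceback]. A round-trip sketch over a
Unix socketpair; the pairing is for illustration only and is not part
of the commit.

    import socket
    from trytond.tiny_socket import MySocket

    parent, child = socket.socketpair()   # Unix only
    sender, receiver = MySocket(parent), MySocket(child)

    sender.mysend(['hello', 42])          # pickles [msg, None]
    print receiver.myreceive()            # ['hello', 42]

    parent.close()
    child.close()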
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/tools/__init__.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,4 @@
+from config import *
+from misc import *
+from translate import *
+from convert import *
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/tools/convert.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,766 @@
+"Convert"
+import re
+import StringIO, xml.dom.minidom
+from trytond import pooler
+from trytond.osv.fields import Integer
+import csv
+import os.path
+from trytond.netsvc import Logger, LOG_ERROR, LOG_INFO, LocalService
+from trytond.config import CONFIG
+from trytond.version import VERSION
+import logging
+
+
+def _ref(self, cursor):
+    return lambda x: self.id_get(cursor, x)
+
+def _obj(pool, cursor, user, model_str, context=None):
+    model = pool.get(model_str)
+    return lambda x: model.browse(cursor, user, x, context=context)
+
+def _eval_xml(self, node, pool, cursor, user, idref, context=None):
+    if context is None:
+        context = {}
+    if node.nodeType == node.TEXT_NODE:
+        return node.data.encode("utf8")
+    elif node.nodeType == node.ELEMENT_NODE:
+        if node.nodeName in ('field','value'):
+            f_type = node.getAttribute('type') or 'char'
+            f_model = node.getAttribute("model").encode('ascii')
+            if len(node.getAttribute('search')):
+                f_search = node.getAttribute("search").encode('utf-8')
+                f_use = node.getAttribute("use").encode('ascii')
+                f_name = node.getAttribute("name").encode('utf-8')
+                if len(f_use)==0:
+                    f_use = "id"
+                args = eval(f_search, idref)
+                ids = pool.get(f_model).search(cursor, user, args)
+                if f_use != 'id':
+                    ids = [x[f_use] for x in pool.get(f_model).read(
+                        cursor, user, ids, [f_use])]
+                _cols = pool.get(f_model)._columns
+                if (f_name in _cols) and _cols[f_name]._type=='many2many':
+                    return ids
+                f_val = False
+                if len(ids):
+                    f_val = ids[0]
+                    if isinstance(f_val, tuple):
+                        f_val = f_val[0]
+                return f_val
+            a_eval = node.getAttribute('eval')
+            if len(a_eval):
+                import time
+                idref['time'] = time
+                idref['version'] = VERSION.rsplit('.', 1)[0]
+                idref['ref'] = lambda x: self.id_get(cursor, x)
+                if len(f_model):
+                    idref['obj'] = _obj(self.pool, cursor, user, f_model,
+                            context=context)
+                try:
+                    import pytz
+                except ImportError:
+                    Logger().notify_channel("init", LOG_INFO,
+                            'could not find pytz library')
+
+
+                    class Pytz(object):
+                        all_timezones = []
+
+                    pytz = Pytz()
+                idref['pytz'] = pytz
+                return eval(a_eval, idref)
+            if f_type == 'xml':
+
+                def _process(string, idref):
+                    matches = re.findall('[^%]%\((.*?)\)[ds]', string)
+                    for i in matches:
+                        if not i in idref:
+                            idref[i] = self.id_get(cursor, i)
+                    return string % idref
+
+                txt = '<?xml version="1.0"?>\n' + \
+                        _process("".join([x.toxml().encode("utf8") \
+                        for x in node.childNodes]), idref)
+                return txt
+            if f_type in ('char', 'int', 'float'):
+                value = ""
+                for child_node in node.childNodes:
+                    value += str(_eval_xml(self, child_node, pool, cursor,
+                        user, idref))
+                if f_type == 'int':
+                    value = value.strip()
+                    if value == 'None':
+                        return None
+                    else:
+                        value = int(value)
+                elif f_type == 'float':
+                    value = float(value.strip())
+                return value
+            elif f_type in ('list', 'tuple'):
+                res = []
+                for child_node in node.childNodes:
+                    if child_node.nodeType != child_node.ELEMENT_NODE \
+                            or child_node.nodeName != 'value':
+                        continue
+                    res.append(_eval_xml(self, child_node, pool, cursor,
+                        user, idref))
+                if f_type == 'tuple':
+                    return tuple(res)
+                return res
+        elif node.nodeName=="getitem":
+            for child_node in node.childNodes:
+                if child_node.nodeType != child_node.ELEMENT_NODE:
+                    continue
+                res = _eval_xml(self, child_node, pool, cursor, user, idref)
+            if not res:
+                raise LookupError
+            elif node.getAttribute('type') in ("int", "list"):
+                return res[int(node.getAttribute('index'))]
+            else:
+                return res[node.getAttribute('index').encode("utf8")]
+        elif node.nodeName=="function":
+            args = []
+            a_eval = node.getAttribute('eval')
+            if len(a_eval):
+                idref['ref'] = lambda x: self.id_get(cursor, x)
+                args = eval(a_eval, idref)
+            for child_node in node.childNodes:
+                if child_node.nodeType != child_node.ELEMENT_NODE:
+                    continue
+                args.append(_eval_xml(self, child_node, pool, cursor, user,
+                    idref, context))
+            model = pool.get(node.getAttribute('model'))
+            method = node.getAttribute('name')
+            res = getattr(model, method)(cursor, user, *args)
+            return res
+        elif node.nodeName=="test":
+            value = ""
+            for child_node in node.childNodes:
+                value += str(_eval_xml(self, child_node, pool, cursor, user,
+                    idref, context=context))
+            return value
+
+ESCAPE_RE = re.compile(r'(?<!\\)/')
+def escape(i):
+    return i.replace('\\/', '/')
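+
+# Example (a sketch): menu entries below use names that encode a path,
+# split on unescaped slashes, while '\/' stands for a literal slash
+# inside one level:
+#
+#     ESCAPE_RE.split('Partners/Contacts\\/Addresses')
+#     # -> ['Partners', 'Contacts\\/Addresses']
+#     escape('Contacts\\/Addresses')
+#     # -> 'Contacts/Addresses'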
+
+
+class AssertionReport(object):
+
+    def __init__(self):
+        self._report = {}
+
+    def record_assertion(self, success, severity):
+        """
+            Record the result of an assertion in the failed/success count
+            and return success.
+        """
+        if severity in self._report:
+            self._report[severity][success] += 1
+        else:
+            self._report[severity] = {success: 1, not success: 0}
+        return success
+
+    def get_report(self):
+        return self._report
+
+    def __str__(self):
+        res = '\nAssertions report:\nLevel\tsuccess\tfailed\n'
+        success = failed = 0
+        for sev in self._report:
+            res += sev + '\t' + str(self._report[sev][True]) + '\t' + \
+                    str(self._report[sev][False]) + '\n'
+            success += self._report[sev][True]
+            failed += self._report[sev][False]
+        res += 'total\t' + str(success) + '\t' + str(failed) + '\n'
+        res += 'end of report (' + str(success + failed) + \
+                ' assertion(s) checked)'
+        return res
+
+class XMLImport(object):
+
+    def isnoupdate(self, data_node=None):
+        return self.noupdate or (data_node \
+                and data_node.getAttribute('noupdate'))
+
+    def get_context(self, data_node, node, eval_dict):
+        data_node_context = (data_node \
+                and data_node.getAttribute('context').encode('utf8'))
+        if data_node_context:
+            context = eval(data_node_context, eval_dict)
+        else:
+            context = {}
+
+        node_context = node.getAttribute("context").encode('utf8')
+        if len(node_context):
+            context.update(eval(node_context, eval_dict))
+
+        return context
+
+    def get_uid(self, cursor, user, data_node, node):
+        node_uid = node.getAttribute('user') or (data_node \
+                and data_node.getAttribute('user'))
+        if len(node_uid):
+            return self.id_get(cursor, node_uid)
+        return user
+
+    def _test_xml_id(self, xml_id):
+        obj_id = xml_id
+        if '.' in xml_id:
+            obj_id = xml_id.split('.')[1]
+        if len(obj_id) > 64:
+            Logger().notify_channel('init', LOG_ERROR,
+                    'id: %s is too long (max: 64)' % xml_id)
+
+    def _tag_delete(self, cursor, rec, data_node=None):
+        d_model = rec.getAttribute("model")
+        d_search = rec.getAttribute("search")
+        d_id = rec.getAttribute("id")
+        ids = []
+        if len(d_search):
+            ids = self.pool.get(d_model).search(cursor, self.user,
+                    eval(d_search))
+        if len(d_id):
+            ids.append(self.id_get(cursor, d_id))
+        if len(ids):
+            self.pool.get(d_model).unlink(cursor, self.user, ids)
+            #self.pool.get('ir.model.data')._unlink(cursor, self.user, d_model,
+            #   ids, direct=True)
+        return False
+
+    def _tag_report(self, cursor, rec, data_node=None):
+        res = {}
+        for dest, attr in (
+                ('name', 'string'),
+                ('model', 'model'),
+                ('report_name', 'name'),
+                ):
+            res[dest] = rec.getAttribute(attr).encode('utf8')
+            assert res[dest], "Attribute %s of report is empty !" % (attr,)
+        for field, dest in (
+                ('rml', 'report_rml'),
+                ('xml', 'report_xml'),
+                ('xsl', 'report_xsl'),
+                ):
+            if rec.hasAttribute(field):
+                res[dest] = rec.getAttribute(field).encode('utf8')
+        if rec.hasAttribute('auto'):
+            res['auto'] = eval(rec.getAttribute('auto'))
+        if rec.hasAttribute('header'):
+            res['header'] = eval(rec.getAttribute('header'))
+        res['multi'] = rec.hasAttribute('multi') \
+                and eval(rec.getAttribute('multi'))
+        xml_id = rec.getAttribute('id').encode('utf8')
+        self._test_xml_id(xml_id)
+        obj_id = self.pool.get('ir.model.data')._update(cursor, self.user,
+                "ir.actions.report.xml", self.module, res, xml_id,
+                mode=self.mode)
+        self.idref[xml_id] = int(obj_id)
+        if not rec.hasAttribute('menu') or eval(rec.getAttribute('menu')):
+            keyword = str(rec.getAttribute('keyword') or 'client_print_multi')
+            value = 'ir.actions.report.xml,' + str(obj_id)
+            replace = rec.hasAttribute('replace') \
+                    and rec.getAttribute("replace")
+            self.pool.get('ir.model.data').ir_set(cursor, self.user, 'action',
+                    keyword, res['name'], [res['model']], value,
+                    replace=replace, isobject=True, xml_id=xml_id)
+        return False
+
+    def _tag_function(self, cursor, rec, data_node=None):
+        if self.isnoupdate(data_node) and self.mode != 'init':
+            return
+        context = self.get_context(data_node, rec, {'ref': _ref(self, cursor)})
+        user = self.get_uid(cursor, self.user, data_node, rec)
+        _eval_xml(self, rec, self.pool, cursor, user, self.idref,
+                context=context)
+        return False
+
+    def _tag_wizard(self, cursor, rec, data_node=None):
+        string = rec.getAttribute("string").encode('utf8')
+        model = rec.getAttribute("model").encode('utf8')
+        name = rec.getAttribute("name").encode('utf8')
+        xml_id = rec.getAttribute('id').encode('utf8')
+        self._test_xml_id(xml_id)
+        multi = rec.hasAttribute('multi') and eval(rec.getAttribute('multi'))
+        res = {'name': string, 'wiz_name': name, 'multi': multi}
+
+        obj_id = self.pool.get('ir.model.data')._update(cursor, self.user,
+                "ir.actions.wizard", self.module, res, xml_id, mode=self.mode)
+        self.idref[xml_id] = int(obj_id)
+        # ir_set
+        if (not rec.hasAttribute('menu') or eval(rec.getAttribute('menu'))) \
+                and obj_id:
+            keyword = str(rec.getAttribute('keyword') or 'client_action_multi')
+            value = 'ir.actions.wizard,' + str(obj_id)
+            replace = rec.hasAttribute('replace') and \
+                    rec.getAttribute("replace") or True
+            self.pool.get('ir.model.data').ir_set(cursor, self.user, 'action',
+                    keyword, string, [model], value, replace=replace,
+                    isobject=True, xml_id=xml_id)
+        return False
+
+    def _tag_act_window(self, cursor, rec, data_node=None):
+        name = rec.hasAttribute('name') \
+                and rec.getAttribute('name').encode('utf-8')
+        xml_id = rec.getAttribute('id').encode('utf8')
+        self._test_xml_id(xml_id)
+        ftype = rec.hasAttribute('type') \
+                and rec.getAttribute('type').encode('utf-8') \
+                or 'ir.actions.act_window'
+        view_id = False
+        if rec.hasAttribute('view'):
+            view_id = self.id_get(cursor,
+                    rec.getAttribute('view').encode('utf-8'))
+        domain = rec.hasAttribute('domain') \
+                and rec.getAttribute('domain').encode('utf-8')
+        context = rec.hasAttribute('context') \
+                and rec.getAttribute('context').encode('utf-8') \
+                or '{}'
+        res_model = rec.getAttribute('res_model').encode('utf-8')
+        src_model = rec.hasAttribute('src_model') \
+                and rec.getAttribute('src_model').encode('utf-8')
+        view_type = rec.hasAttribute('view_type') \
+                and rec.getAttribute('view_type').encode('utf-8') \
+                or 'form'
+        view_mode = rec.hasAttribute('view_mode') \
+                and rec.getAttribute('view_mode').encode('utf-8') \
+                or 'tree,form'
+        usage = rec.hasAttribute('usage') \
+                and rec.getAttribute('usage').encode('utf-8')
+        limit = rec.hasAttribute('limit') \
+                and rec.getAttribute('limit').encode('utf-8')
+        auto_refresh = rec.hasAttribute('auto_refresh') \
+                and rec.getAttribute('auto_refresh').encode('utf-8')
+
+        res = {
+                'name': name,
+                'type': ftype,
+                'view_id': view_id,
+                'domain': domain,
+                'context': context,
+                'res_model': res_model,
+                'src_model': src_model,
+                'view_type': view_type,
+                'view_mode': view_mode,
+                'usage': usage,
+                'limit': limit,
+                'auto_refresh': auto_refresh,
+            }
+
+        obj_id = self.pool.get('ir.model.data')._update(cursor, self.user,
+                'ir.actions.act_window', self.module, res, xml_id,
+                mode=self.mode)
+        self.idref[xml_id] = int(obj_id)
+
+        if src_model:
+            keyword = 'client_action_relate'
+            value = 'ir.actions.act_window,' + str(obj_id)
+            replace = rec.hasAttribute('replace') \
+                    and rec.getAttribute('replace')
+            self.pool.get('ir.model.data').ir_set(cursor, self.user, 'action',
+                    keyword, xml_id, [src_model], value, replace=replace,
+                    isobject=True, xml_id=xml_id)
+        # TODO add remove ir.model.data
+        return False
+
+    def _tag_ir_set(self, cursor, rec, data_node=None):
+        if not self.mode == 'init':
+            return False
+        res = {}
+        for field in [i for i in rec.childNodes \
+                if (i.nodeType == i.ELEMENT_NODE and i.nodeName=="field")]:
+            f_name = field.getAttribute("name").encode('utf-8')
+            f_val = _eval_xml(self, field, self.pool, cursor, self.user,
+                    self.idref)
+            res[f_name] = f_val
+        self.pool.get('ir.model.data').ir_set(cursor, self.user, res['key'],
+                res['key2'], res['name'], res['models'], res['value'],
+                replace=res.get('replace',True),
+                isobject=res.get('isobject', False), meta=res.get('meta',None))
+        return False
+
+    def _tag_workflow(self, cursor, rec, data_node=None):
+        if self.isnoupdate(data_node) and self.mode != 'init':
+            return
+        model = str(rec.getAttribute('model'))
+        w_ref = rec.getAttribute('ref')
+        if len(w_ref):
+            obj_id = self.id_get(cursor, w_ref)
+        else:
+            assert rec.childNodes, 'You must define a child node ' \
+                    'if you don\'t give a ref'
+            element_childs = [i for i in rec.childNodes \
+                    if i.nodeType == i.ELEMENT_NODE]
+            assert len(element_childs) == 1, 'Only one child node ' \
+                    'is accepted (%d given)' % len(element_childs)
+            obj_id = _eval_xml(self, element_childs[0], self.pool, cursor,
+                    self.user, self.idref)
+
+        user = self.get_uid(cursor, self.user, data_node, rec)
+        wf_service = LocalService("workflow")
+        wf_service.trg_validate(user, model,
+            obj_id,
+            str(rec.getAttribute('action')), cursor)
+        return False
+
+    def _tag_menuitem(self, cursor, rec, data_node=None):
+        rec_id = rec.getAttribute("id").encode('ascii')
+        self._test_xml_id(rec_id)
+        m_l = [escape(x) for x in ESCAPE_RE.split(
+            rec.getAttribute("name").encode('utf8'))]
+        pid = False
+        for idx, menu_elem in enumerate(m_l):
+            if pid:
+                cursor.execute('SELECT id FROM ir_ui_menu ' \
+                        'WHERE parent_id = %d AND name = %s',
+                        (pid, menu_elem))
+            else:
+                cursor.execute('SELECT id FROM ir_ui_menu ' \
+                        'WHERE parent_id IS NULL AND name = %s',
+                        (menu_elem,))
+            res = cursor.fetchone()
+            if idx == (len(m_l) - 1):
+                # we are at the last menu element/level (it's a leaf)
+                values = {'parent_id': pid, 'name': menu_elem}
+
+                if rec.hasAttribute('action'):
+                    a_action = rec.getAttribute('action').encode('utf8')
+                    a_type = rec.getAttribute('type').encode('utf8') \
+                            or 'act_window'
+                    icons = {
+                        "act_window": 'STOCK_NEW',
+                        "report.xml": 'STOCK_PASTE',
+                        "wizard": 'STOCK_EXECUTE',
+                        "url": 'STOCK_JUMP_TO'
+                    }
+                    values['icon'] = icons.get(a_type,'STOCK_NEW')
+                    if a_type == 'act_window':
+                        a_id = self.id_get(cursor, a_action)
+                        cursor.execute('SELECT view_type, view_mode, ' \
+                                'name, view_id ' \
+                                'FROM ir_act_window ' \
+                                'WHERE id = %d', (int(a_id),))
+                        action_type, action_mode, action_name, view_id = \
+                                cursor.fetchone()
+                        if view_id:
+                            cursor.execute('SELECT type FROM ir_ui_view ' \
+                                    'WHERE id = %d', (int(view_id),))
+                            action_mode, = cursor.fetchone()
+                        cursor.execute('SELECT view_mode ' \
+                                'FROM ir_act_window_view ' \
+                                'WHERE act_window_id = %d ' \
+                                'ORDER BY sequence LIMIT 1', (int(a_id),))
+                        if cursor.rowcount:
+                            action_mode, = cursor.fetchone()
+                        if action_type == 'tree':
+                            values['icon'] = 'STOCK_INDENT'
+                        elif action_mode and action_mode.startswith('tree'):
+                            values['icon'] = 'STOCK_JUSTIFY_FILL'
+                        elif action_mode and action_mode.startswith('graph'):
+                            values['icon'] = 'terp-graph'
+                        elif action_mode and action_mode.startswith('calendar'):
+                            values['icon'] = 'terp-calendar'
+                        if not values['name']:
+                            values['name'] = action_name
+                if rec.hasAttribute('sequence'):
+                    values['sequence'] = int(rec.getAttribute('sequence'))
+                if rec.hasAttribute('icon'):
+                    values['icon'] = str(rec.getAttribute('icon'))
+                if rec.hasAttribute('groups'):
+                    g_names = rec.getAttribute('groups').split(',')
+                    groups_value = []
+                    groups_obj = self.pool.get('res.groups')
+                    for group in g_names:
+                        if group.startswith('-'):
+                            obj_id = groups_obj.search(cursor, self.user,
+                                    [('name', '=', group[1:])])[0]
+                            groups_value.append((3, obj_id))
+                        else:
+                            obj_id = groups_obj.search(cursor, self.user,
+                                [('name', '=', group)])[0]
+                            groups_value.append((4, obj_id))
+                    values['groups_id'] = groups_value
+                xml_id = rec.getAttribute('id').encode('utf8')
+                self._test_xml_id(xml_id)
+                pid = self.pool.get('ir.model.data')._update(cursor, self.user,
+                        'ir.ui.menu', self.module, values, xml_id,
+                        mode=self.mode, res_id=(res and res[0] or False))
+            elif res:
+                # the menuitem already exists
+                pid = res[0]
+                xml_id = (idx==len(m_l) - 1) \
+                        and rec.getAttribute('id').encode('utf8')
+                self.pool.get('ir.model.data')._update_dummy(cursor,
+                        self.user, 'ir.ui.menu', self.module, xml_id)
+            else:
+                # the menuitem doesn't exist but we are in a branch (not a leaf)
+                pid = self.pool.get('ir.ui.menu').create(cursor, self.user,
+                    {'parent_id': pid, 'name': menu_elem})
+        if rec_id and pid:
+            self.idref[rec_id] = int(pid)
+        if rec.hasAttribute('action') and pid:
+            a_action = rec.getAttribute('action').encode('utf8')
+            a_type = rec.getAttribute('type').encode('utf8') or 'act_window'
+            a_id = self.id_get(cursor, a_action)
+            action = "ir.actions.%s,%d" % (a_type, a_id)
+            self.pool.get('ir.model.data').ir_set(cursor, self.user, 'action',
+                'tree_but_open', 'Menuitem', [('ir.ui.menu', int(pid))],
+                action, True, True, xml_id=rec_id)
+        return ('ir.ui.menu', pid)
+
+    @staticmethod
+    def _assert_equals(i, j, prec=4):
+        return not round(i - j, prec)
+
+    def _tag_assert(self, cursor, rec, data_node=None):
+        if self.isnoupdate(data_node) and self.mode != 'init':
+            return
+
+        rec_model = rec.getAttribute("model").encode('ascii')
+        model = self.pool.get(rec_model)
+        assert model, "The model %s does not exist !" % (rec_model,)
+        rec_id = rec.getAttribute("id").encode('ascii')
+        self._test_xml_id(rec_id)
+        rec_src = rec.getAttribute("search").encode('utf8')
+        rec_src_count = rec.getAttribute("count")
+
+        severity = rec.getAttribute("severity").encode('ascii') or 'info'
+
+        rec_string = rec.getAttribute("string").encode('utf8') or 'unknown'
+
+        ids = None
+        eval_dict = {'ref': _ref(self, cursor)}
+        context = self.get_context(data_node, rec, eval_dict)
+        user = self.get_uid(cursor, self.user, data_node, rec)
+        if len(rec_id):
+            ids = [self.id_get(cursor, rec_id)]
+        elif len(rec_src):
+            args = eval(rec_src, eval_dict)
+            ids = self.pool.get(rec_model).search(cursor, user, args,
+                    context=context)
+            if len(rec_src_count):
+                count = int(rec_src_count)
+                if len(ids) != count:
+                    self.assert_report.record_assertion(False, severity)
+                    Logger().notify_channel('init', severity,
+                            'assertion "' + rec_string + \
+                                    '" failed ! (search count is incorrect: ' \
+                                    + str(len(ids)) + ')' )
+                    sevval = getattr(logging, severity.upper())
+                    if sevval > CONFIG['assert_exit_level']:
+                        # TODO: define a dedicated exception
+                        raise Exception('Severe assertion failure')
+                    return
+
+        assert ids is not None, \
+                'You must give either an id or a search criteria'
+
+        ref = _ref(self, cursor)
+        for brrec in model.browse(cursor, user, ids, context):
+
+
+            class Dict(dict):
+
+                def __getitem__(self, key):
+                    if key in brrec:
+                        return brrec[key]
+                    return dict.__getitem__(self, key)
+
+            eval_globals = Dict()
+            eval_globals['floatEqual'] = self._assert_equals
+            eval_globals['ref'] = ref
+            eval_globals['_ref'] = ref
+            for test in [i for i in rec.childNodes \
+                    if (i.nodeType == i.ELEMENT_NODE and i.nodeName=="test")]:
+                f_expr = test.getAttribute("expr").encode('utf-8')
+                f_val = _eval_xml(self, test, self.pool, cursor, user,
+                        self.idref, context=context) or True
+                if eval(f_expr, eval_globals) != f_val: # assertion failed
+                    self.assert_report.record_assertion(False, severity)
+                    Logger().notify_channel('init', severity,
+                            'assertion "' + rec_string + \
+                                    '" failed ! (tag ' + test.toxml() + ')' )
+                    sevval = getattr(logging, severity.upper())
+                    if sevval > CONFIG['assert_exit_level']:
+                        # TODO: define a dedicated exception
+                        raise Exception('Severe assertion failure')
+                    return
+        else: # all tests were successful for this assertion tag (no break)
+            self.assert_report.record_assertion(True, severity)
+
+    def _tag_record(self, cursor, rec, data_node=None):
+        rec_model = rec.getAttribute("model").encode('ascii')
+        model = self.pool.get(rec_model)
+        assert model, "The model %s does not exist !" % (rec_model,)
+        rec_id = rec.getAttribute("id").encode('ascii')
+        self._test_xml_id(rec_id)
+
+#        if not rec_id and not self.isnoupdate(data_node):
+#            print "Warning", rec_model
+
+        if self.isnoupdate(data_node) and not self.mode == 'init':
+            # check if the xml record has an id string
+            if rec_id:
+                obj_id = self.pool.get('ir.model.data')._update_dummy(cursor,
+                        self.user, rec_model, self.module, rec_id)
+                # check if the resource already existed at the last update
+                if obj_id:
+                    # if it existed, we don't update the data, but we need to
+                    # know the id of the existing record anyway
+                    self.idref[rec_id] = int(obj_id)
+                    return None
+                else:
+                    # if the resource didn't exist
+                    if rec.getAttribute("forcecreate"):
+                        # we want to create it, so we let the normal
+                        # "update" behavior happen
+                        pass
+                    else:
+                        # otherwise do nothing
+                        return None
+            else:
+                # otherwise it is skipped
+                return None
+
+        res = {}
+        for field in [i for i in rec.childNodes \
+                if (i.nodeType == i.ELEMENT_NODE and i.nodeName=="field")]:
+            #TODO: most of this code is duplicated above (in _eval_xml)
+            f_name = field.getAttribute("name").encode('utf-8')
+            f_ref = field.getAttribute("ref").encode('ascii')
+            f_search = field.getAttribute("search").encode('utf-8')
+            f_model = field.getAttribute("model").encode('ascii')
+            if not f_model and model._columns.get(f_name, False):
+                f_model = model._columns[f_name]._obj
+            f_use = field.getAttribute("use").encode('ascii') or 'id'
+            f_val = False
+
+            if len(f_search):
+                args = eval(f_search, self.idref)
+                field = []
+                assert f_model, 'Define an attribute ' \
+                        'model="..." in your .XML file !'
+                f_obj = self.pool.get(f_model)
+                # browse the objects searched
+                objs = f_obj.browse(cursor, self.user, f_obj.search(cursor,
+                    self.user, args))
+                # column definitions of the "local" object
+                _cols = self.pool.get(rec_model)._columns
+                # if the current field is many2many
+                if (f_name in _cols) and _cols[f_name]._type=='many2many':
+                    f_val = [(6, 0, [x[f_use] for x in objs])]
+                elif len(objs):
+                    # otherwise (we are probably in a many2one field),
+                    # take the first element of the search
+                    f_val = objs[0][f_use]
+            elif len(f_ref):
+                if f_ref == "null":
+                    f_val = False
+                else:
+                    f_val = self.id_get(cursor, f_ref)
+            else:
+                f_val = _eval_xml(self, field, self.pool, cursor, self.user,
+                        self.idref)
+                if model._columns.has_key(f_name):
+                    if isinstance(model._columns[f_name], Integer):
+                        f_val = int(f_val)
+            res[f_name] = f_val
+        obj_id = self.pool.get('ir.model.data')._update(cursor, self.user,
+                rec_model, self.module, res, rec_id or False,
+                noupdate=self.isnoupdate(data_node), mode=self.mode)
+        if rec_id:
+            self.idref[rec_id] = int(obj_id)
+        return rec_model, obj_id
+
+    def id_get(self, cursor, id_str):
+        if id_str in self.idref:
+            return self.idref[id_str]
+        mod = self.module
+        if '.' in id_str:
+            mod, id_str = id_str.split('.')
+        result = self.pool.get('ir.model.data')._get_id(cursor, self.user,
+                mod, id_str)
+        return int(self.pool.get('ir.model.data').read(cursor, self.user,
+            [result], ['res_id'])[0]['res_id'])
+
+    def parse(self, xmlstr):
+        doc = xml.dom.minidom.parseString(xmlstr)
+        elem = doc.documentElement
+        for node in [i for i in elem.childNodes \
+                if (i.nodeType == i.ELEMENT_NODE and i.nodeName=="data")]:
+            for rec in node.childNodes:
+                if rec.nodeType == rec.ELEMENT_NODE:
+                    if rec.nodeName in self._tags:
+                        try:
+                            self._tags[rec.nodeName](self.cursor, rec, node)
+                        except:
+                            Logger().notify_channel("init", LOG_INFO,
+                                    '\n'+rec.toxml())
+                            self.cursor.rollback()
+                            raise
+        return True
+
+    def __init__(self, cursor, module, idref, mode, report=None,
+            noupdate=False):
+        if report is None:
+            report = AssertionReport()
+        self.mode = mode
+        self.module = module
+        self.cursor = cursor
+        self.idref = idref
+        self.pool = pooler.get_pool(cursor.dbname)
+        self.user = 1
+        self.assert_report = report
+        self.noupdate = noupdate
+        self._tags = {
+            'menuitem': self._tag_menuitem,
+            'record': self._tag_record,
+            'assert': self._tag_assert,
+            'report': self._tag_report,
+            'wizard': self._tag_wizard,
+            'delete': self._tag_delete,
+            'ir_set': self._tag_ir_set,
+            'function': self._tag_function,
+            'workflow': self._tag_workflow,
+            'act_window': self._tag_act_window,
+        }
+
+def convert_csv_import(cursor, module, fname, csvcontent, idref=None,
+        mode='init', noupdate=False):
+    '''
+    Import csv file :
+        quote: "
+        delimiter: ,
+        encoding: utf-8
+    '''
+    if idref is None:
+        idref = {}
+    model = ('.'.join(fname.split('.')[:-1]).split('-'))[0]
+    #remove folder path from model
+    model = os.path.split(model)[1]
+
+    pool = pooler.get_pool(cursor.dbname)
+
+    input_file = StringIO.StringIO(csvcontent)
+    reader = csv.reader(input_file, quotechar='"', delimiter=',')
+    fields = reader.next()
+
+    if not (mode == 'init' or 'id' in fields):
+        return
+
+    user = 1
+    datas = []
+    for line in reader:
+        if (not line) or not reduce(lambda x, y: x or y, line):
+            continue
+        datas.append([x.decode('utf8').encode('utf8') for x in line])
+    pool.get(model).import_data(cursor, user, fields, datas, mode,
+            module, noupdate)
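+
+# Usage sketch (file name and content are hypothetical): the model name
+# is taken from the file name, so 'res.groups.csv' and
+# 'res.groups-extra.csv' both import into res.groups; outside 'init'
+# mode the file must provide an 'id' column:
+#
+#     convert_csv_import(cursor, 'base', 'res.groups.csv',
+#             'id,name\ngroup_admin,Administrators\n', mode='update')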
+
+def convert_xml_import(cursor, module, xmlstr, idref=None, mode='init',
+        noupdate=False, report=None):
+    if idref is None:
+        idref = {}
+    if report is None:
+        report = AssertionReport()
+    obj = XMLImport(cursor, module, idref, mode, report=report,
+            noupdate=noupdate)
+    obj.parse(xmlstr)
+    del obj
+    return True
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/tools/misc.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,411 @@
+# -*- coding: utf8 -*-
+"""
+Miscellaneous tools used by tryton
+"""
+import os, time, sys
+import inspect
+from config import CONFIG
+import socket
+import zipfile
+
+if sys.version_info[:2] < (2, 4):
+    from threadinglocal import local
+else:
+    from threading import local
+
+def find_in_path(name):
+    if os.name == "nt":
+        sep = ';'
+    else:
+        sep = ':'
+    path = [directory for directory in os.environ['PATH'].split(sep)
+            if os.path.isdir(directory)]
+    for directory in path:
+        val = os.path.join(directory, name)
+        if os.path.isfile(val) or os.path.islink(val):
+            return val
+    return None
+
+def find_pg_tool(name):
+    if CONFIG['pg_path'] and CONFIG['pg_path'] != 'None':
+        return os.path.join(CONFIG['pg_path'], name)
+    else:
+        return find_in_path(name)
+
+def exec_pg_command(name, *args):
+    prog = find_pg_tool(name)
+    if not prog:
+        raise Exception('Couldn\'t find %s' % name)
+    args2 = (os.path.basename(prog),) + args
+    return os.spawnv(os.P_WAIT, prog, args2)
+
+def exec_pg_command_pipe(name, *args):
+    prog = find_pg_tool(name)
+    if not prog:
+        raise Exception('Couldn\'t find %s' % name)
+    if os.name == "nt":
+        cmd = '"' + prog + '" ' + ' '.join(args)
+    else:
+        cmd = prog + ' ' + ' '.join(args)
+    return os.popen2(cmd, 'b')
+
+def exec_command_pipe(name, *args):
+    prog = find_in_path(name)
+    if not prog:
+        raise Exception('Couldn\'t find %s' % name)
+    if os.name == "nt":
+        cmd = '"'+prog+'" '+' '.join(args)
+    else:
+        cmd = prog+' '+' '.join(args)
+    return os.popen2(cmd, 'b')
+
+def file_open(name, mode="r", subdir='addons'):
+    """Open a file from the root dir, using a subdir folder."""
+    root_path = os.path.dirname(os.path.dirname(__file__))
+    if subdir:
+        if subdir == 'addons' and name in ('ir', 'res'):
+            name = os.path.join(root_path, name)
+        else:
+            name = os.path.join(root_path, subdir, name)
+    else:
+        name = os.path.join(root_path, name)
+
+    # Check for a zipfile in the path
+    head = name
+    zipname = False
+    name2 = False
+    while True:
+        head, tail = os.path.split(head)
+        if not tail:
+            break
+        if zipname:
+            zipname = os.path.join(tail, zipname)
+        else:
+            zipname = tail
+        if zipfile.is_zipfile(head+'.zip'):
+            import StringIO
+            zfile = zipfile.ZipFile(head+'.zip')
+            try:
+                return StringIO.StringIO(zfile.read(os.path.join(
+                    os.path.basename(head), zipname).replace(
+                        os.sep, '/')))
+            except KeyError:
+                name2 = os.path.normpath(os.path.join(head + '.zip', zipname))
+    for i in (name2, name):
+        if i and os.path.isfile(i):
+            return file(i, mode)
+
+    raise IOError, 'File not found : '+str(name)
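+
+# Lookup sketch (paths assumed): file_open('stock/stock.xml') first
+# resolves to <root>/addons/stock/stock.xml; while walking up the path
+# it also checks for <root>/addons/stock.zip and, if present, reads
+# 'stock/stock.xml' from inside that archive.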
+
+def email_send(email_from, email_to, subject, body, email_cc=None,
+        email_bcc=None, reply_to=False, tinycrm=False):
+    """Send an email."""
+    if not email_cc:
+        email_cc = []
+    if not email_bcc:
+        email_bcc = []
+    import smtplib
+    from email.MIMEText import MIMEText
+    from email.Header import Header
+    from email.Utils import formatdate, COMMASPACE
+
+    msg = MIMEText(body or '', _charset='utf-8')
+    msg['Subject'] = Header(subject.decode('utf8'), 'utf-8')
+    msg['From'] = email_from
+    del msg['Reply-To']
+    if reply_to:
+        msg['Reply-To'] = msg['From']+', '+reply_to
+    msg['To'] = COMMASPACE.join(email_to)
+    if email_cc:
+        msg['Cc'] = COMMASPACE.join(email_cc)
+    if email_bcc:
+        msg['Bcc'] = COMMASPACE.join(email_bcc)
+    msg['Date'] = formatdate(localtime=True)
+    if tinycrm:
+        msg['Message-Id'] = '<' + str(time.time()) + '-tinycrm-' + \
+                str(tinycrm) + '@' + socket.gethostname() + '>'
+    try:
+        smtp = smtplib.SMTP()
+        smtp.connect(CONFIG['smtp_server'])
+        if CONFIG['smtp_user'] or CONFIG['smtp_password']:
+            smtp.login(CONFIG['smtp_user'], CONFIG['smtp_password'])
+        smtp.sendmail(email_from, email_to + email_cc + email_bcc,
+                msg.as_string())
+        smtp.quit()
+    except Exception, exp:
+        import logging
+        logging.getLogger().info(str(exp))
+    return True
+
+def email_send_attach(email_from, email_to, subject, body, email_cc=None,
+        email_bcc=None, reply_to=False, attach=None,
+        tinycrm=False):
+    """Send an email."""
+    if not email_cc:
+        email_cc = []
+    if not email_bcc:
+        email_bcc = []
+    if not attach:
+        attach = []
+    import smtplib
+    from email.MIMEText import MIMEText
+    from email.MIMEBase import MIMEBase
+    from email.MIMEMultipart import MIMEMultipart
+    from email.Header import Header
+    from email.Utils import formatdate, COMMASPACE
+    from email import Encoders
+
+    msg = MIMEMultipart()
+
+    msg['Subject'] = Header(subject.decode('utf8'), 'utf-8')
+    msg['From'] = email_from
+    del msg['Reply-To']
+    if reply_to:
+        msg['Reply-To'] = reply_to
+    msg['To'] = COMMASPACE.join(email_to)
+    if email_cc:
+        msg['Cc'] = COMMASPACE.join(email_cc)
+    if email_bcc:
+        msg['Bcc'] = COMMASPACE.join(email_bcc)
+    if tinycrm:
+        msg['Message-Id'] = '<' + str(time.time()) + '-tinycrm-' + \
+                str(tinycrm) + '@' + socket.gethostname()+'>'
+    msg['Date'] = formatdate(localtime=True)
+    msg.attach(MIMEText(body or '', _charset='utf-8'))
+    for (fname, fcontent) in attach:
+        part = MIMEBase('application', "octet-stream")
+        part.set_payload(fcontent)
+        Encoders.encode_base64(part)
+        part.add_header('Content-Disposition',
+                'attachment; filename="%s"' % (fname,))
+        msg.attach(part)
+    try:
+        smtp = smtplib.SMTP()
+        smtp.connect(CONFIG['smtp_server'])
+        if CONFIG['smtp_user'] or CONFIG['smtp_password']:
+            smtp.login(CONFIG['smtp_user'], CONFIG['smtp_password'])
+        smtp.sendmail(email_from, email_to + email_cc + email_bcc,
+                msg.as_string())
+        smtp.quit()
+    except Exception, exp:
+        import logging
+        logging.getLogger().info(str(exp))
+    return True
+
+def sms_send(user, password, api_id, text, to):
+    "text must be latin-1 encoded"
+    import urllib
+    params = urllib.urlencode({
+        'user': user,
+        'password': password,
+        'api_id': api_id,
+        'text': text,
+        'to': to,
+        })
+    #urllib.urlopen("http://api.clickatell.com/http/sendmsg", params)
+    urllib.urlopen("http://196.7.150.220/http/sendmsg", params)
+    return True
+
+
+class UpdateableStr(local):
+    '''Stores an updateable str to use in wizards'''
+
+    def __init__(self, string=''):
+        super(UpdateableStr, self).__init__()
+        self.string = string
+
+    def __str__(self):
+        return str(self.string)
+
+    def __repr__(self):
+        return str(self.string)
+
+    def __nonzero__(self):
+        return bool(self.string)
+
+
+class UpdateableDict(local):
+    '''Stores an updateable dict to use in wizards'''
+
+    def __init__(self, value=None):
+        super(UpdateableDict, self).__init__()
+        if value is None:
+            value = {}
+        self.dict = value
+
+    def __str__(self):
+        return str(self.dict)
+
+    def __repr__(self):
+        return str(self.dict)
+
+    def clear(self):
+        return self.dict.clear()
+
+    def keys(self):
+        return self.dict.keys()
+
+    def __setitem__(self, i, j):
+        self.dict.__setitem__(i, j)
+
+    def __getitem__(self, i):
+        return self.dict.__getitem__(i)
+
+    def copy(self):
+        return self.dict.copy()
+
+    def iteritems(self):
+        return self.dict.iteritems()
+
+    def iterkeys(self):
+        return self.dict.iterkeys()
+
+    def itervalues(self):
+        return self.dict.itervalues()
+
+    def pop(self, k, d=None):
+        return self.dict.pop(k, d)
+
+    def popitem(self):
+        return self.dict.popitem()
+
+    def setdefault(self, k, d=None):
+        return self.dict.setdefault(k, d)
+
+    def update(self, E, **F):
+        return self.dict.update(E, **F)
+
+    def values(self):
+        return self.dict.values()
+
+    def get(self, k, d=None):
+        return self.dict.get(k, d)
+
+    def has_key(self, k):
+        return self.dict.has_key(k)
+
+    def items(self):
+        return self.dict.items()
+
+    def __cmp__(self, y):
+        return self.dict.__cmp__(y)
+
+    def __contains__(self, k):
+        return self.dict.__contains__(k)
+
+    def __delitem__(self, y):
+        return self.dict.__delitem__(y)
+
+    def __eq__(self, y):
+        return self.dict.__eq__(y)
+
+    def __ge__(self, y):
+        return self.dict.__ge__(y)
+
+    def __gt__(self, y):
+        return self.dict.__gt__(y)
+
+    def __hash__(self):
+        return self.dict.__hash__()
+
+    def __iter__(self):
+        return self.dict.__iter__()
+
+    def __le__(self, y):
+        return self.dict.__le__(y)
+
+    def __len__(self):
+        return self.dict.__len__()
+
+    def __lt__(self, y):
+        return self.dict.__lt__(y)
+
+    def __ne__(self, y):
+        return self.dict.__ne__(y)
+
+
+class Cache(object):
+    """
+    Use it as a decorator of the function you plan to cache
+    Timeout: 0 = no timeout, otherwise in seconds
+    """
+
+    def __init__(self, timeout=10000):
+        self.timeout = timeout
+        self.cache = {}
+
+    def __call__(self, function):
+        arg_names = inspect.getargspec(function)[0][2:]
+        def cached_result(self2, cursor=None, *args, **kwargs):
+            if cursor is None:
+                self.cache = {}
+                return True
+
+            # Update named arguments with positional argument values
+            kwargs.update(dict(zip(arg_names, args)))
+            kwargs = kwargs.items()
+            kwargs.sort()
+
+            # Work out key as a tuple of ('argname', value) pairs
+            key = (('dbname', cursor.dbname),) + tuple(kwargs)
+
+            # Check cache and return cached value if possible
+            if key in self.cache:
+                (value, last_time) = self.cache[key]
+                mintime = time.time() - self.timeout
+                if self.timeout <= 0 or mintime <= last_time:
+                    return value
+
+            # Work out new value, cache it and return it
+            # Should we copy() this value to avoid future modification
+            # of the cached result?
+            result = function(self2, cursor, **dict(kwargs))
+
+            self.cache[key] = (result, time.time())
+            return result
+
+        return cached_result
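+
+# Usage sketch (the model and method are hypothetical): decorating a
+# method caches its results per database and argument set; calling the
+# decorated method with cursor=None clears the cache.
+#
+#     class Example(object):
+#
+#         @Cache(timeout=3600)
+#         def heavy_compute(self, cursor, user, arg=None):
+#             ...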
+
+def get_languages():
+    languages = {
+        'zh_CN': 'Chinese (CN)',
+        'zh_TW': 'Chinese (TW)',
+        'cs_CZ': 'Czech',
+        'de_DE': 'Deutsch',
+        'es_AR': 'Español (Argentina)',
+        'es_ES': 'Español (España)',
+        'fr_FR': 'Français',
+        'fr_CH': 'Français (Suisse)',
+        'en_EN': 'English (default)',
+        'hu_HU': 'Hungarian',
+        'it_IT': 'Italiano',
+        'pt_BR': 'Portuguese (Brasil)',
+        'pt_PT': 'Portuguese (Portugal)',
+        'nl_NL': 'Nederlands',
+        'ro_RO': 'Romanian',
+        'ru_RU': 'Russian',
+        'sv_SE': 'Swedish',
+    }
+    return languages
+
+def scan_languages():
+    import glob
+    file_list = [os.path.splitext(os.path.basename(f))[0] \
+            for f in glob.glob(os.path.join(CONFIG['root_path'],
+                'i18n', '*.csv'))]
+    lang_dict = get_languages()
+    return [(lang, lang_dict.get(lang, lang)) for lang in file_list]
+
+def mod10r(number):
+    """
+    Input number : account or invoice number
+    Output return: the same number completed with the recursive mod10
+    key
+    """
+    codec = [0, 9, 4, 6, 8, 2, 7, 1, 3, 5]
+    report = 0
+    result = ""
+    for digit in number:
+        result += digit
+        if digit.isdigit():
+            report = codec[ (int(digit) + report) % 10 ]
+    return result + str((10 - report) % 10)
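+
+# Worked example (a sketch): mod10r('12000') carries a running key
+# through the codec table, starting from 0:
+#     '1': codec[(1 + 0) % 10] = 9
+#     '2': codec[(2 + 9) % 10] = 9
+#     '0': codec[(0 + 9) % 10] = 5
+#     '0': codec[(0 + 5) % 10] = 2
+#     '0': codec[(0 + 2) % 10] = 4
+# and appends (10 - 4) % 10 = 6, so mod10r('12000') == '120006'.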
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/tools/translate.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,381 @@
+import os
+from os.path import join
+import fnmatch
+import csv, xml.dom, re
+from trytond import pooler
+from trytond.netsvc import Logger, LOG_ERROR, LOG_WARNING, LOG_INFO, _SERVICE
+from misc import UpdateableStr, file_open, get_languages
+from trytond.config import CONFIG
+
+
+class TINY(csv.excel):
+    lineterminator = '\n'
+
+csv.register_dialect("TINY", TINY)
+
+# TODO: a caching method
+def translate(cursor, user, name, source_type, lang, source=None):
+    if source and name:
+        cursor.execute('SELECT value FROM ir_translation ' \
+                'WHERE lang = %s AND type = %s AND name = %s AND src = %s',
+                (lang, source_type, str(name), source))
+    elif name:
+        cursor.execute('SELECT value FROM ir_translation ' \
+                'WHERE lang = %s AND type = %s AND name = %s',
+                (lang, source_type, str(name)))
+    elif source:
+        cursor.execute('SELECT value FROM ir_translation ' \
+                'WHERE lang = %s AND type = %s AND src = %s',
+                (lang, source_type, source))
+    res_trans = cursor.fetchone()
+    res = res_trans and res_trans[0] or False
+    return res
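+
+# Example call (a sketch; the field name is assumed): fetch the French
+# label of a field from ir_translation, getting False when no
+# translation is stored:
+#
+#     translate(cursor, user, 'res.partner,name', 'field', 'fr_FR')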
+
+def translate_code(cursor, user, source, context):
+    lang = context.get('lang', False)
+    if lang:
+        return translate(cursor, user, None, 'code', lang, source)
+    else:
+        return source
+
+#_ = lambda source: translate_code(cursor, user, source, context)
+
+def trans_parse_view(view):
+    res = []
+    if view.hasAttribute("string"):
+        string = view.getAttribute('string')
+        if string:
+            res.append(string.encode("utf8"))
+    if view.hasAttribute("sum"):
+        string = view.getAttribute('sum')
+        if string:
+            res.append(string.encode("utf8"))
+    for child_view in [i for i in view.childNodes \
+            if (i.nodeType == i.ELEMENT_NODE)]:
+        res.extend(trans_parse_view(child_view))
+    return res
+
+# tests whether an object is in a list of modules
+def in_modules(object_name, modules):
+    if 'all' in modules:
+        return True
+
+    module_dict = {
+        'ir': 'base',
+        'res': 'base',
+        'workflow': 'base',
+    }
+    module = object_name.split('.')[0]
+    module = module_dict.get(module, module)
+    return module in modules
+
+def trans_generate(lang, modules, dbname=None):
+    logger = Logger()
+    if not dbname:
+        dbname = CONFIG['db_name']
+    pool = pooler.get_pool(dbname)
+    trans_obj = pool.get('ir.translation')
+    model_data_obj = pool.get('ir.model.data')
+    cursor = pooler.get_db(dbname).cursor()
+    user = 1
+    objects = pool.obj_pool.items()
+    objects.sort()
+    out = [["type", "name", "res_id", "src", "value"]]
+
+    to_translate = []
+
+    # object fields
+    for obj_name, obj in objects:
+        if in_modules(obj_name, modules):
+            for field_name, field_def in obj._columns.iteritems():
+                name = obj_name + "," + field_name
+                value = ""
+                if lang:
+                    cursor.execute("SELECT * FROM ir_translation " \
+                            "WHERE type = 'field' AND name = %s AND lang = %s",
+                            (name, lang))
+                    res = cursor.dictfetchall()
+                    if len(res):
+                        value = res[0]['value']
+                out.append(["field", name, "0",
+                    field_def.string.encode('utf8'), value])
+                if field_def.help:
+                    value = ''
+                    if lang:
+                        cursor.execute('SELECT * FROM ir_translation ' \
+                                'WHERE type=\'help\' ' \
+                                    'AND name = %s ' \
+                                    'AND lang = %s', (name, lang))
+                        res = cursor.dictfetchall()
+                        if res:
+                            value = res[0]['value']
+                    out.append(['help', name, '0',
+                        field_def.help.encode('utf8'), value])
+                if field_def.translate:
+                    ids = obj.search(cursor, user, [])
+                    obj_values = obj.read(cursor, user, ids, [field_name])
+                    for obj_value in obj_values:
+                        trans = ""
+                        if lang:
+                            cursor.execute("SELECT * " \
+                                    "FROM ir_translation " \
+                                    "WHERE type='model' " \
+                                        "AND name=%s AND res_id=%d AND lang=%s",
+                                        (name, obj_value['id'], lang))
+                            res = cursor.dictfetchall()
+                            if len(res):
+                                trans = res[0]['value']
+
+                        res_id = obj_value['id']
+                        if obj_name in ('ir.model', 'ir.ui.menu'):
+                            res_id = 0
+                        model_data_ids = model_data_obj.search(cursor, user, [
+                            ('model', '=', obj_name),
+                            ('res_id', '=', obj_value['id']),
+                            ])
+                        if model_data_ids:
+                            model_data = model_data_obj.browse(cursor, user,
+                                    model_data_ids[0])
+                            res_id = model_data.module + '.' + model_data.name
+
+                        out.append(["model", name, res_id,
+                            obj_value[field_name], trans])
+                if hasattr(field_def, 'selection') \
+                        and isinstance(field_def.selection, (list, tuple)):
+                    for key, val in field_def.selection:
+                        to_translate.append(["selection", name,
+                            [val.encode('utf8')]])
+
+    # reports (xsl and rml)
+    obj = pool.get("ir.actions.report.xml")
+    for i in obj.read(cursor, user, obj.search(cursor, user, [])):
+        if in_modules(i["model"], modules):
+            name = i["report_name"]
+            fname = ""
+            xmlstr = None
+            report_type = None
+            parse_func = None
+            try:
+                xmlstr = file_open(fname).read()
+                document = xml.dom.minidom.parseString(xmlstr)
+                to_translate.append([report_type, name,
+                    parse_func(document.documentElement)])
+            except IOError:
+                if fname:
+                    logger.notify_channel("init", LOG_WARNING,
+                            "couldn't export translation for report %s %s %s" %\
+                                    (name, report_type, fname))
+    # views
+    obj = pool.get("ir.ui.view")
+    for i in obj.read(cursor, user, obj.search(cursor, user, [])):
+        if in_modules(i["model"], modules):
+            document = xml.dom.minidom.parseString(i['arch'])
+            to_translate.append(["view", i['model'],
+                trans_parse_view(document.documentElement)])
+
+    # wizards
+    for service_name, obj in _SERVICE.iteritems():
+        if service_name.startswith('wizard.'):
+            for state_name, state_def in obj.states.iteritems():
+                if 'result' in state_def:
+                    result = state_def['result']
+                    if result['type'] != 'form':
+                        continue
+
+                    name = obj.wiz_name + ',' + state_name
+
+                    # export fields
+                    for field_name, field_def in result['fields'].iteritems():
+                        if 'string' in field_def:
+                            source = field_def['string']
+                            res_name = name + ',' + field_name
+                            to_translate.append(["wizard_field", res_name,
+                                [source]])
+
+                    # export arch
+                    arch = result['arch']
+                    if not isinstance(arch, UpdateableStr):
+                        document = xml.dom.minidom.parseString(arch)
+                        to_translate.append(["wizard_view", name,
+                            trans_parse_view(document.documentElement)])
+
+                    # export button labels
+                    for but_args in result['state']:
+                        button_name = but_args[0]
+                        button_label = but_args[1]
+                        res_name = name + ',' + button_name
+                        to_translate.append(["wizard_button", res_name,
+                            [button_label]])
+
+    # code
+    for root, dirs, files in os.walk(CONFIG['root_path']):
+        for fname in fnmatch.filter(files, '*.py'):
+            frelativepath = join(root, fname)
+            code_string = file_open(frelativepath, subdir='').read()
+
+# TODO: add support for """ and '''... These should use the DOTALL flag
+# DOTALL
+#     Make the "." special character match any character at all, including a
+#     newline; without this flag, "." will match anything except a newline.
+            # *? is the non-greedy version of the * qualifier
+            for i in re.finditer(
+                '[^a-zA-Z0-9_]_\([\s]*["\'](.*?)["\'][\s]*\)',
+                code_string):
+                source = i.group(1).encode('utf8')
+# TODO: check whether the same string has already been exported
+                res = trans_obj._get_source(cursor, frelativepath, 'code',
+                        lang, source) or ''
+                out.append(["code", frelativepath, "0", source, res])
+
+    # translate strings marked as to be translated
+    for ttype, name, sources in to_translate:
+        for source in sources:
+            trans = trans_obj._get_source(cursor, name, ttype, lang,
+                    source)
+            out.append([ttype, name, "0", source, trans or ''])
+
+    cursor.close()
+    return out
+
+def trans_load(db_name, filename, lang, strict=False):
+    logger = Logger()
+    data = ''
+    try:
+        data = file(filename, 'r').read().split('\n')
+    except IOError:
+        logger.notify_channel("init", LOG_ERROR, "couldn't read file")
+    return trans_load_data(db_name, data, lang, strict=strict)
+
+def trans_load_data(db_name, data, lang, strict=False, lang_name=None):
+    logger = Logger()
+    logger.notify_channel("init", LOG_INFO,
+            'loading translation file for language %s' % (lang))
+    pool = pooler.get_pool(db_name)
+    lang_obj = pool.get('res.lang')
+    trans_obj = pool.get('ir.translation')
+    model_data_obj = pool.get('ir.model.data')
+    values_obj = pool.get('ir.values')
+    try:
+        user = 1
+        cursor = pooler.get_db(db_name).cursor()
+
+        ids = lang_obj.search(cursor, user, [('code', '=', lang)])
+        if not ids:
+            if not lang_name:
+                lang_name = lang
+                languages = get_languages()
+                if lang in languages:
+                    lang_name = languages[lang]
+            ids = lang_obj.create(cursor, user, {
+                'code': lang,
+                'name': lang_name,
+                'translatable': 1,
+                })
+        else:
+            lang_obj.write(cursor, user, ids, {'translatable': 1})
+        lang_ids = lang_obj.search(cursor, user, [])
+        langs = lang_obj.read(cursor, user, lang_ids)
+        selection = [(x['code'], x['name']) for x in langs]
+
+        values_obj.set(cursor, user, 'meta', 'lang', 'lang',
+                [('res.users', False,)],
+                False, True, False, meta={
+                    'type': 'selection',
+                    'string': 'Language',
+                    'selection': selection,
+                    })
+
+        user_ids = pool.get('res.users').search(cursor, user, [])
+        for user_id in user_ids:
+            values_obj.set(cursor, user, 'meta', 'lang', 'lang',
+                    [('res.users', user_id,)], lang, True, False)
+
+        reader = csv.reader(data)
+        # read the first line of the file (it contains the column titles)
+        for row in reader:
+            first = row
+            break
+
+        # read the rest of the file
+        line = 1
+        for row in reader:
+            line += 1
+            try:
+                # skip empty rows and rows where the translation field is empty
+                if (not row) or (not row[4]):
+                    continue
+
+                # dictionary which holds values for this line of the csv file
+                # {'lang': ..., 'type': ..., 'name': ..., 'res_id': ...,
+                #  'src': ..., 'value': ...}
+                dic = {'lang': lang}
+                for i in range(len(first)):
+                    dic[first[i]] = row[i]
+
+                try:
+                    dic['res_id'] = int(dic['res_id'])
+                except ValueError:
+                    model_data_ids = model_data_obj.search(cursor, user, [
+                        ('model', '=', dic['name'].split(',')[0]),
+                        ('module', '=', dic['res_id'].split('.', 1)[0]),
+                        ('name', '=', dic['res_id'].split('.', 1)[1]),
+                        ])
+                    if model_data_ids:
+                        dic['res_id'] = model_data_obj.browse(cursor, user,
+                                model_data_ids[0]).res_id
+                    else:
+                        dic['res_id'] = False
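+                # Illustration (hypothetical values): res_id may arrive as
+                # a plain database id such as '42', or as an XML id such as
+                # 'ir.menu_administration', which the lookup above resolves
+                # through ir.model.data to the numeric res_id.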
+
+                if dic['type'] == 'model' and not strict:
+                    (model, field) = dic['name'].split(',')
+
+                    # get the ids of the resources of this model which share
+                    # the same source
+                    obj = pool.get(model)
+                    if obj:
+                        obj_ids = obj.search(cursor, user,
+                                [(field, '=', dic['src'])])
+
+                        # if the resource id (res_id) is in that list, use it,
+                        # otherwise use the whole list
+                        obj_ids = (dic['res_id'] in obj_ids) \
+                                and [dic['res_id']] or obj_ids
+                        for res_id in obj_ids:
+                            dic['res_id'] = res_id
+                            trans_ids = trans_obj.search(cursor, user, [
+                                ('lang', '=', lang),
+                                ('type', '=', dic['type']),
+                                ('name', '=', dic['name']),
+                                ('src', '=', dic['src']),
+                                ('res_id', '=', dic['res_id'])
+                            ])
+                            if trans_ids:
+                                trans_obj.write(cursor, user, trans_ids,
+                                        {'value': dic['value']})
+                            else:
+                                trans_obj.create(cursor, user, dic)
+                else:
+                    trans_ids = trans_obj.search(cursor, user, [
+                        ('lang', '=', lang),
+                        ('type', '=', dic['type']),
+                        ('name', '=', dic['name']),
+                        ('src', '=', dic['src'])
+                    ])
+                    if trans_ids:
+                        trans_obj.write(cursor, user, trans_ids,
+                                {'value': dic['value']})
+                    else:
+                        trans_obj.create(cursor, user, dic)
+                cursor.commit()
+            except Exception, exp:
+                logger.notify_channel('init', LOG_ERROR,
+                        'Import error: %s on line %d: %s!' % \
+                                (str(exp), line, row))
+                cursor.rollback()
+                cursor.close()
+                cursor = pooler.get_db(db_name).cursor()
+        cursor.close()
+        logger.notify_channel("init", LOG_INFO,
+                "translation file loaded succesfully")
+    except IOError:
+        logger.notify_channel("init", LOG_ERROR, "couldn't read file")
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/trytond.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,203 @@
+# Copyright (c) 2004-2006 TINY SPRL. (http://tiny.be)
+# Copyright (c) 2007 Cedric Krier.
+#
+# WARNING: This program as such is intended to be used by professional
+# programmers who take the whole responsibility of assessing all potential
+# consequences resulting from its eventual inadequacies and bugs.
+# End users who are looking for a ready-to-use solution with commercial
+# guarantees and support are strongly advised to contact a Free Software
+# Service Company.
+#
+# This program is Free Software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
+#
+##############################################################################
+
+"""
+%prog [options]
+"""
+
+import sys, os, signal
+import netsvc
+import time
+import psycopg
+import pooler
+import sql_db
+import config
+from config import CONFIG
+import web_service
+import workflow
+import report
+from module import register_classes
+import osv, security, tools, version
+
+
+class TrytonServer(object):
+
+    def __init__(self):
+        netsvc.init_logger()
+        self.logger = netsvc.Logger()
+
+        if sys.platform == 'win32':
+            import mx.DateTime
+            mx.DateTime.strptime = lambda x, y: mx.DateTime.mktime(
+                    time.strptime(x, y))
+
+        self.logger.notify_channel("objects", netsvc.LOG_INFO,
+                'initialising distributed object services')
+
+        self.dispatcher = netsvc.Dispatcher()
+        self.dispatcher.monitor(signal.SIGINT)
+
+        self.db_name = CONFIG["db_name"]
+
+        cursor = None
+        try:
+            if self.db_name:
+                cursor = pooler.get_db_only(self.db_name).cursor()
+        except psycopg.OperationalError:
+            self.logger.notify_channel("init", netsvc.LOG_INFO,
+                    "could not connect to database '%s'!" % self.db_name,)
+        if cursor:
+            cursor.execute("SELECT relname " \
+                    "FROM pg_class " \
+                    "WHERE relkind = 'r' AND relname in (" \
+                    "'inherit', "
+                    "'ir_values', "
+                    "'ir_model', "
+                    "'ir_model_fields', "
+                    "'ir_actions', "
+                    "'ir_act_window', "
+                    "'ir_act_report_xml', "
+                    "'ir_act_report_custom', "
+                    "'ir_act_group', "
+                    "'ir_act_group_link', "
+                    "'ir_act_execute', "
+                    "'ir_act_wizard', "
+                    "'ir_act_url', "
+                    "'ir_ui_view', "
+                    "'ir_ui_menu', "
+                    "'res_users', "
+                    "'res_groups', "
+                    "'res_roles', "
+                    "'res_roles_users_rel', "
+                    "'res_groups_users_rel', "
+                    "'wkf', "
+                    "'wkf_activity', "
+                    "'wkf_transition', "
+                    "'wkf_instance', "
+                    "'wkf_workitem', "
+                    "'wkf_witm_trans', "
+                    "'wkf_logs', "
+                    "'ir_module_category', "
+                    "'ir_module_module', "
+                    "'ir_module_module_dependency'"
+                    ")")
+            if len(cursor.fetchall()) == 0:
+                self.logger.notify_channel("init", netsvc.LOG_INFO, "init db")
+                sql_db.init_db(cursor)
+                # in that case, force --init=all
+                CONFIG["init"]["all"] = 1
+                CONFIG['update']['all'] = 1
+                if not CONFIG['without_demo']:
+                    CONFIG["demo"]['all'] = 1
+            cursor.commit()
+            cursor.close()
+
+        web_service.DB()
+        web_service.Common()
+        web_service.Object()
+        web_service.Wizard()
+        web_service.Report()
+
+        workflow.WorkflowService()
+
+        # TODO add report service
+
+    def run(self):
+
+        register_classes()
+        if self.db_name:
+            pooler.get_db_and_pool(self.db_name,
+                    update_module=bool(CONFIG['init'] or CONFIG['update']))
+
+        if CONFIG["stop_after_init"]:
+            sys.exit(0)
+
+        # Launch Server
+        secure = CONFIG["secure"]
+        if CONFIG['xmlrpc']:
+            interface = CONFIG["interface"]
+            try:
+                port = int(CONFIG["port"])
+            except ValueError:
+                self.logger.notify_channel("init", netsvc.LOG_ERROR,
+                        "invalid port '%s'!" % (CONFIG["port"],))
+                sys.exit(1)
+
+            httpd = netsvc.HttpDaemon(interface, port, secure)
+
+            xml_gw = netsvc.XmlRpc.RpcGateway('web-services')
+            httpd.attach("/xmlrpc", xml_gw )
+            self.logger.notify_channel("web-services", netsvc.LOG_INFO,
+                        "starting XML-RPC" + \
+                                (CONFIG['secure'] and ' Secure' or '') + \
+                                " services, port " + str(port))
+
+        if CONFIG['netrpc']:
+            netinterface = CONFIG["netinterface"]
+            try:
+                netport = int(CONFIG["netport"])
+            except Exception:
+                self.logger.notify_channel("init", netsvc.LOG_ERROR,
+                        "invalid port '%s'!" % (CONFIG["netport"],))
+                sys.exit(1)
+
+            tinysocket = netsvc.TinySocketServerThread(netinterface, netport,
+                    False)
+            self.logger.notify_channel("web-services", netsvc.LOG_INFO,
+                    "starting netrpc service, port " + str(netport))
+
+        def handler(signum, frame):
+            if CONFIG['netrpc']:
+                tinysocket.stop()
+            if CONFIG['xmlrpc']:
+                httpd.stop()
+            netsvc.Agent.quit()
+            if CONFIG['pidfile']:
+                os.unlink(CONFIG['pidfile'])
+            sys.exit(0)
+
+        if CONFIG['pidfile']:
+            fd_pid = open(CONFIG['pidfile'], 'w')
+            pidtext = "%d" % (os.getpid())
+            fd_pid.write(pidtext)
+            fd_pid.close()
+
+        signal.signal(signal.SIGINT, handler)
+        signal.signal(signal.SIGTERM, handler)
+
+        self.logger.notify_channel("web-services", netsvc.LOG_INFO,
+                'the server is running, waiting for connections...')
+        if CONFIG['netrpc']:
+            tinysocket.start()
+        if CONFIG['xmlrpc']:
+            httpd.start()
+        #DISPATCHER.run()
+
+if __name__ == "__main__":
+    SERVER = TrytonServer()
+    SERVER.run()
+    while True:
+        time.sleep(1)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/version.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,5 @@
+PACKAGE = "trytond"
+VERSION = "0.0.1"
+LICENSE = "GPL v2"
+WEBSITE = "http://www.tryton.org"
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/web_service/__init__.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,5 @@
+from db import *
+from common import *
+from object import *
+from wizard import *
+from report import *
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/web_service/common.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,33 @@
+from trytond.netsvc import Service, Logger, LOG_INFO
+from trytond import security
+from trytond.version import VERSION
+import time
+
+
+class Common(Service):
+
+    def __init__(self, name="common"):
+        Service.__init__(self, name)
+        Service.join_group(self, "web-services")
+        Service.export_method(self, self.about)
+        Service.export_method(self, self.login)
+        Service.export_method(self, self.timezone_get)
+
+    def login(self, database, login, password):
+        res = security.login(database, login, password)
+        logger = Logger()
+        msg = res and 'successful login' or 'bad login or password'
+        logger.notify_channel("web-service", LOG_INFO,
+                "%s from '%s' using database '%s'" % (msg, login, database))
+        return res or False
+
+    def about(self):
+        return '''
+Tryton %s
+
+The whole source code is distributed under the terms of the
+GNU General Public License v2.
+''' % (VERSION,)
+
+    def timezone_get(self):
+        return time.tzname[0]
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/web_service/db.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,258 @@
+from trytond.netsvc import Service, Logger, LOG_ERROR, LOG_INFO, LOG_WARNING
+import threading
+from trytond import security
+from trytond import sql_db
+from trytond import pooler
+from trytond import tools
+import base64
+import os
+
+
+class DB(Service):
+
+    def __init__(self, name="db"):
+        super(DB, self).__init__(name)
+        self.join_group("web-services")
+        self.export_method(self.create)
+        self.export_method(self.get_progress)
+        self.export_method(self.drop)
+        self.export_method(self.dump)
+        self.export_method(self.restore)
+        self.export_method(self.list)
+        self.export_method(self.list_lang)
+        self.export_method(self.change_admin_password)
+        self.actions = {}
+        self._id = 0
+        self.id_protect = threading.Semaphore()
+
+    def create(self, password, db_name, demo, lang):
+        security.check_super(password)
+        self.id_protect.acquire()
+        self._id += 1
+        db_id = self._id
+        self.id_protect.release()
+
+        self.actions[db_id] = {'clean': False}
+
+        database = sql_db.db_connect('template1', serialize=1)
+        database.truedb.autocommit()
+        cursor = database.cursor()
+        cursor.execute('CREATE DATABASE ' + db_name + ' ENCODING \'unicode\'')
+        cursor.close()
+
+
+        class DBInitialize(object):
+            def __call__(self, service, db_id, db_name, demo, lang):
+                try:
+                    service.actions[db_id]['progress'] = 0
+                    cursor = sql_db.db_connect(db_name).cursor()
+                    sql_db.init_db(cursor)
+                    cursor.commit()
+                    cursor.close()
+                    pooler.get_pool(db_name, demo, service.actions[db_id],
+                            update_module=True)
+                    if lang and lang != 'en_US':
+                        filename = tools.CONFIG["root_path"] + "/i18n/" + \
+                                lang + ".csv"
+                        tools.trans_load(db_name, filename, lang)
+                    service.actions[db_id]['clean'] = True
+                    cursor = sql_db.db_connect(db_name).cursor()
+                    cursor.execute('select login, password, name ' \
+                            'from res_users ' \
+                            'where login <> \'root\' order by login')
+                    service.actions[db_id]['users'] = cursor.dictfetchall()
+                    cursor.close()
+                except Exception, exp:
+                    service.actions[db_id]['clean'] = False
+                    service.actions[db_id]['exception'] = exp
+                    from StringIO import StringIO
+                    import traceback
+                    e_str = StringIO()
+                    traceback.print_exc(file=e_str)
+                    traceback_str = e_str.getvalue()
+                    e_str.close()
+                    service.actions[db_id]['traceback'] = traceback_str
+                    cursor.close()
+
+        logger = Logger()
+        logger.notify_channel("web-services", LOG_INFO,
+                'CREATE DB: %s' % (db_name))
+        dbi = DBInitialize()
+        create_thread = threading.Thread(target=dbi,
+                args=(self, db_id, db_name, demo, lang))
+        create_thread.start()
+        self.actions[db_id]['thread'] = create_thread
+        return db_id
+
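+    # Expected client flow (a sketch, not enforced here): poll
+    # get_progress(password, db_id); it returns (fraction, []) while the
+    # DBInitialize thread is alive, then (1.0, users) on success, or
+    # re-raises the captured exception on failure.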
+    def get_progress(self, password, db_id):
+        security.check_super(password)
+        if self.actions[db_id]['thread'].isAlive():
+#            return addons.init_progress[db_name]
+            return (min(self.actions[db_id].get('progress', 0), 0.95), [])
+        else:
+            clean = self.actions[db_id]['clean']
+            if clean:
+                users = self.actions[db_id]['users']
+                del self.actions[db_id]
+                return (1.0, users)
+            else:
+                exp = self.actions[db_id]['exception']
+                del self.actions[db_id]
+                raise Exception, exp
+
+    def drop(self, password, db_name):
+        security.check_super(password)
+        pooler.close_db(db_name)
+        logger = Logger()
+
+        database = sql_db.db_connect('template1', serialize=1)
+        database.truedb.autocommit()
+        cursor = database.cursor()
+        try:
+            try:
+                cursor.execute('DROP DATABASE ' + db_name)
+            except:
+                logger.notify_channel("web-service", LOG_ERROR,
+                    'DROP DB: %s failed' % (db_name,))
+                raise
+            else:
+                logger.notify_channel("web-services", LOG_INFO,
+                    'DROP DB: %s' % (db_name))
+        finally:
+            cursor.close()
+        return True
+
+    def dump(self, password, db_name):
+        security.check_super(password)
+        logger = Logger()
+
+        if tools.CONFIG['db_password']:
+            logger.notify_channel("web-service", LOG_ERROR,
+                    'DUMP DB: %s doesn\'t work with password' % (db_name,))
+            raise Exception, "Couldn't dump database with password"
+
+        cmd = ['pg_dump', '--format=c']
+        if tools.CONFIG['db_user']:
+            cmd.append('--username=' + tools.CONFIG['db_user'])
+        if tools.CONFIG['db_host']:
+            cmd.append('--host=' + tools.CONFIG['db_host'])
+        if tools.CONFIG['db_port']:
+            cmd.append('--port=' + tools.CONFIG['db_port'])
+        cmd.append(db_name)
+
+        stdin, stdout = tools.exec_pg_command_pipe(*tuple(cmd))
+        stdin.close()
+        data = stdout.read()
+        res = stdout.close()
+        if res:
+            logger.notify_channel("web-service", LOG_ERROR,
+                    'DUMP DB: %s failed\n%s' % (db_name, data))
+            raise Exception, "Couldn't dump database"
+        logger.notify_channel("web-services", LOG_INFO,
+                'DUMP DB: %s' % (db_name))
+        return base64.encodestring(data)
+
+    def restore(self, password, db_name, data):
+        security.check_super(password)
+        logger = Logger()
+
+        if self.db_exist(db_name):
+            logger.notify_channel("web-service", LOG_WARNING,
+                    'RESTORE DB: %s already exists' % (db_name,))
+            raise Exception, "Database already exists"
+
+        if tools.CONFIG['db_password']:
+            logger.notify_channel("web-service", LOG_ERROR,
+                    'RESTORE DB: %s doesn\'t work with password' % (db_name,))
+            raise Exception, "Couldn't restore database with password"
+
+        database = sql_db.db_connect('template1', serialize=1)
+        database.truedb.autocommit()
+        cursor = database.cursor()
+        cursor.execute('CREATE DATABASE ' + db_name + ' ENCODING \'unicode\'')
+        cursor.close()
+
+        cmd = ['pg_restore']
+        if tools.CONFIG['db_user']:
+            cmd.append('--username=' + tools.CONFIG['db_user'])
+        if tools.CONFIG['db_host']:
+            cmd.append('--host=' + tools.CONFIG['db_host'])
+        if tools.CONFIG['db_port']:
+            cmd.append('--port=' + tools.CONFIG['db_port'])
+        cmd.append('--dbname=' + db_name)
+        args2 = tuple(cmd)
+
+        buf = base64.decodestring(data)
+        if os.name == "nt":
+            tmpfile = (os.environ.get('TMP') or 'C:\\') + os.tmpnam()
+            file(tmpfile, 'wb').write(buf)
+            args2 = list(args2)
+            args2.append(' ' + tmpfile)
+            args2 = tuple(args2)
+        stdin, stdout = tools.exec_pg_command_pipe(*args2)
+        if os.name != "nt":
+            stdin.write(buf)
+        stdin.close()
+        res = stdout.close()
+        if res:
+            raise Exception, "Couldn't restore database"
+        logger.notify_channel("web-services", LOG_INFO,
+                'RESTORE DB: %s' % (db_name))
+        return True
+
+    def db_exist(self, db_name):
+        try:
+            database = sql_db.db_connect(db_name)
+            database.truedb.close()
+            return True
+        except:
+            return False
+
+    def list(self):
+        database = sql_db.db_connect('template1')
+        try:
+            cursor = database.cursor()
+            db_user = tools.CONFIG["db_user"]
+            if not db_user and os.name == 'posix':
+                import pwd
+                db_user = pwd.getpwuid(os.getuid())[0]
+            if not db_user:
+                cursor.execute("SELECT usename " \
+                        "FROM pg_user " \
+                        "WHERE usesysid = (" \
+                            "SELECT datdba " \
+                            "FROM pg_database " \
+                            "WHERE datname = %s)",
+                            (tools.CONFIG["db_name"],))
+                res = cursor.fetchone()
+                db_user = res and res[0]
+            if db_user:
+                cursor.execute("SELECT datname " \
+                        "FROM pg_database " \
+                        "WHERE datdba = (" \
+                            "SELECT usesysid " \
+                            "FROM pg_user " \
+                            "WHERE usename=%s) " \
+                            "AND datname not in " \
+                                "('template0', 'template1', 'postgres')",
+                                (db_user,))
+            else:
+                cursor.execute("SELECT datname " \
+                        "FROM pg_database " \
+                        "WHERE datname not in " \
+                            "('template0', 'template1','postgres')")
+            res = [name for (name,) in cursor.fetchall()]
+            cursor.close()
+        except:
+            res = []
+        database.truedb.close()
+        return res
+
+    def change_admin_password(self, old_password, new_password):
+        security.check_super(old_password)
+        tools.CONFIG['admin_passwd'] = new_password
+        tools.CONFIG.save()
+        return True
+
+    def list_lang(self):
+        return tools.scan_languages()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/web_service/object.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,31 @@
+from trytond.netsvc import Service, LocalService
+from trytond import security
+
+class Object(Service):
+
+    def __init__(self, name="object"):
+        Service.__init__(self, name)
+        Service.join_group(self, 'web-services')
+        Service.export_method(self, self.execute)
+        Service.export_method(self, self.exec_workflow)
+        Service.export_method(self, self.obj_list)
+
+    def exec_workflow(self, database, user, passwd, object_name, method,
+            object_id):
+        security.check(database, user, passwd)
+        service = LocalService("object_proxy")
+        res = service.exec_workflow(database, user, object_name, method,
+                object_id)
+        return res
+
+    def execute(self, database, user, passwd, object_name, method, *args):
+        security.check(database, user, passwd)
+        service = LocalService("object_proxy")
+        res = service.execute(database, user, object_name, method, *args)
+        return res
+
+    def obj_list(self, database, user, passwd):
+        security.check(database, user, passwd)
+        service = LocalService("object_proxy")
+        res = service.obj_list()
+        return res
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/web_service/report.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,82 @@
+from trytond.netsvc import Service, LocalService
+from trytond import security
+import threading
+import thread
+from trytond import pooler
+from trytond.config import CONFIG
+import base64
+
+
+class Report(Service):
+
+    def __init__(self, name='report'):
+        Service.__init__(self, name)
+        Service.join_group(self, 'web-services')
+        Service.export_method(self, self.report)
+        Service.export_method(self, self.report_get)
+        self._reports = {}
+        self.max_id = 0
+        self.id_protect = threading.Semaphore()
+
+    def report(self, database, user, passwd, object_name, ids, datas=None,
+            context=None):
+        if datas is None:
+            datas = {}
+        if context is None:
+            context = {}
+        security.check(database, user, passwd)
+
+        self.id_protect.acquire()
+        self.max_id += 1
+        report_id = self.max_id
+        self.id_protect.release()
+
+        self._reports[report_id] = {
+                'user': user,
+                'result': False,
+                'state': False,
+                }
+
+        def _go(report_id, user, ids, datas, context):
+            cursor = pooler.get_db(database).cursor()
+            obj = LocalService('report.' + object_name)
+            (result, format) = obj.create(cursor, user, ids, datas, context)
+            cursor.close()
+            self._reports[report_id]['result'] = result
+            self._reports[report_id]['format'] = format
+            self._reports[report_id]['state'] = True
+            return True
+
+        thread.start_new_thread(_go, (report_id, user, ids, datas, context))
+        return report_id
+
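+    # Expected client flow (sketch): poll report_get() with the returned
+    # report_id; it answers {'state': False} while the thread runs, then a
+    # dict with base64-encoded 'result' and 'format' (zlib-compressed
+    # first when CONFIG['reportgz'] is set).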
+    def _check_report(self, report_id):
+        result = self._reports[report_id]
+        res = {'state': result['state']}
+        if res['state']:
+            if CONFIG['reportgz']:
+                import zlib
+                res2 = zlib.compress(result['result'])
+                res['code'] = 'zlib'
+            else:
+                #CHECKME: why is this needed???
+                if isinstance(result['result'], unicode):
+                    res2 = result['result'].encode('latin1', 'replace')
+                else:
+                    res2 = result['result']
+            if res2:
+                res['result'] = base64.encodestring(res2)
+            res['format'] = result['format']
+            del self._reports[report_id]
+        return res
+
+    def report_get(self, database, user, passwd, report_id):
+        security.check(database, user, passwd)
+
+        if report_id in self._reports:
+            if self._reports[report_id]['user'] == user:
+                return self._check_report(report_id)
+            else:
+                raise Exception, 'AccessDenied'
+        else:
+            raise Exception, 'ReportNotFound'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/web_service/wizard.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,42 @@
+from trytond.netsvc import Service, LocalService
+from trytond import security
+
+
+class Wizard(Service):
+
+    def __init__(self, name='wizard'):
+        Service.__init__(self, name)
+        Service.join_group(self, 'web-services')
+        Service.export_method(self, self.execute)
+        Service.export_method(self, self.create)
+        self.max_id = 0
+        self.wiz_datas = {}
+        self.wiz_name = {}
+        self.wiz_uid = {}
+
+    def _execute(self, database, user, wiz_id, datas, action, context):
+        self.wiz_datas[wiz_id].update(datas)
+        wiz = LocalService('wizard.' + self.wiz_name[wiz_id])
+        return wiz.execute(database, user, self.wiz_datas[wiz_id], action,
+                context)
+
+    def create(self, database, user, passwd, wiz_name, datas=None):
+        if not datas:
+            datas = {}
+        security.check(database, user, passwd)
+        # FIXME: this is not thread-safe
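+        # One fix (a sketch, assuming a threading.Semaphore kept on self,
+        # as DB and Report already do with self.id_protect):
+        #     self.id_protect.acquire()
+        #     self.max_id += 1
+        #     wiz_id = self.max_id
+        #     self.id_protect.release()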
+        self.max_id += 1
+        self.wiz_datas[self.max_id] = {}
+        self.wiz_name[self.max_id] = wiz_name
+        self.wiz_uid[self.max_id] = user
+        return self.max_id
+
+    def execute(self, database, user, passwd, wiz_id, datas, *args):
+        security.check(database, user, passwd)
+        if wiz_id in self.wiz_uid:
+            if self.wiz_uid[wiz_id] == user:
+                return self._execute(database, user, wiz_id, datas, *args)
+            else:
+                raise Exception, 'AccessDenied'
+        else:
+            raise Exception, 'WizardNotFound'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/workflow/__init__.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,1 @@
+from wkf_service import *
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/workflow/instance.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,64 @@
+import workitem
+
+def create(cursor, ident, wkf_id):
+    (user, res_type, res_id) = ident
+    cursor.execute("SELECT NEXTVAL('wkf_instance_id_seq')")
+    (id_new,) = cursor.fetchone()
+    cursor.execute('INSERT INTO wkf_instance ' \
+            '(id, res_type, res_id, uid, wkf_id) VALUES (%d,%s,%s,%s,%s)',
+            (id_new, res_type, res_id, user, wkf_id))
+    cursor.execute('SELECT * FROM wkf_activity ' \
+            'WHERE flow_start = True and wkf_id = %d', (wkf_id,))
+    res = cursor.dictfetchall()
+    workitem.create(cursor, res, id_new, ident)
+    update(cursor, id_new, ident)
+    return id_new
+
+def delete(cursor, ident):
+    (user, res_type, res_id) = ident
+    cursor.execute('DELETE FROM wkf_instance ' \
+            'WHERE res_id = %d AND res_type = %s', (res_id, res_type))
+
+def validate(cursor, inst_id, ident, signal, force_running=False):
+    cursor.execute("SELECT * FROM wkf_workitem WHERE inst_id = %d", (inst_id,))
+    for witem in cursor.dictfetchall():
+        workitem.process(cursor, witem, ident, signal, force_running)
+    return _update_end(cursor, inst_id, ident)
+
+def update(cursor, inst_id, ident):
+    cursor.execute("SELECT * FROM wkf_workitem WHERE inst_id = %d", (inst_id,))
+    for witem in cursor.dictfetchall():
+        workitem.process(cursor, witem, ident)
+    return _update_end(cursor, inst_id, ident)
+
+def _update_end(cursor, inst_id, ident):
+    cursor.execute('SELECT state, flow_stop FROM wkf_workitem w ' \
+            'LEFT JOIN wkf_activity a ' \
+                'ON (a.id = w.act_id) WHERE w.inst_id = %d', (inst_id,))
+    res = True
+    for row in cursor.fetchall():
+        if (row[0] != 'complete') or not row[1]:
+            res = False
+            break
+    if res:
+        cursor.execute('SELECT DISTINCT a.name FROM wkf_activity a ' \
+                'LEFT JOIN wkf_workitem w ' \
+                    'ON (a.id = w.act_id) ' \
+                'WHERE w.inst_id = %d', (inst_id,))
+        act_names = cursor.fetchall()
+        cursor.execute("UPDATE wkf_instance " \
+                "SET state = 'complete' WHERE id = %d", (inst_id,))
+        cursor.execute("UPDATE wkf_workitem " \
+                "SET state = 'complete' WHERE subflow_id = %d", (inst_id,))
+        # TODO remove the subquery
+        cursor.execute("SELECT i.id, w.osv, i.res_id " \
+                "FROM wkf_instance i " \
+                    "LEFT JOIN wkf w ON (i.wkf_id = w.id) " \
+                "WHERE i.id IN (" \
+                    "SELECT inst_id FROM wkf_workitem " \
+                    "WHERE subflow_id = %d)", (inst_id,))
+        for i in cursor.fetchall():
+            for act_name in act_names:
+                validate(cursor, i[0], (ident[0], i[1], i[2]),
+                        'subflow.' + act_name[0])
+    return res
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/workflow/wkf_expr.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,73 @@
+from trytond import netsvc
+from trytond import pooler
+
+
+class EnvCall(object):
+
+    def __init__(self, wf_service, d_arg):
+        self.wf_service = wf_service
+        self.d_arg = d_arg
+
+    def __call__(self, *args):
+        arg = self.d_arg + args
+        return self.wf_service.execute_cr(*arg)
+
+
+class Env(dict):
+
+    def __init__(self, wf_service, cursor, user, model, ids):
+        super(Env, self).__init__()
+        self.wf_service = wf_service
+        self.cursor = cursor
+        self.user = user
+        self.model = model
+        self.ids = ids
+        self.obj = pooler.get_pool(cursor.dbname).get(model)
+        self.columns = self.obj._columns.keys() + \
+                self.obj._inherit_fields.keys()
+
+    def __getitem__(self, key):
+        if (key in self.columns) and (not super(Env, self).__contains__(key)):
+            res = self.wf_service.execute_cr(self.cursor, self.user,
+                    self.model, 'read', self.ids, [key])[0][key]
+            super(Env, self).__setitem__(key, res)
+            return res
+        elif key in dir(self.obj):
+            return EnvCall(self.wf_service, (self.cursor, self.user, self.model,
+                key, self.ids))
+        else:
+            return super(Env, self).__getitem__(key)
+
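+# Usage sketch for Env (illustrative, values assumed): for an Env over
+# ('res.users', [1]), env['name'] lazily reads and caches the 'name'
+# column, while env['write'] returns an EnvCall bound to the model's
+# write method, so expression lines can both read fields and call methods.
+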
+def eval_expr(cursor, ident, action):
+    res = False
+    for line in action.split('\n'):
+        user = ident[0]
+        model = ident[1]
+        ids = [ident[2]]
+        if line == 'True':
+            res = True
+        elif line == 'False':
+            res = False
+        else:
+            wf_service = netsvc.LocalService("object_proxy")
+            env = Env(wf_service, cursor, user, model, ids)
+            res = eval(line, env)
+    return res
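+
+# Hypothetical action string, evaluated line by line by eval_expr:
+#     "write({'state': 'done'})\nTrue"
+# The first line calls the model's write method through EnvCall; the value
+# of the last line is kept, so eval_expr returns True.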
+
+def execute(cursor, ident, activity):
+    return eval_expr(cursor, ident, activity['action'])
+
+def check(cursor, ident, transition, signal):
+    res = True
+    if transition['signal']:
+        res = (signal == transition['signal'])
+
+    if transition['role_id']:
+        user = ident[0]
+        serv = netsvc.LocalService('object_proxy')
+        user_roles = serv.execute_cr(cursor, user, 'res.users', 'read', [user],
+                ['roles_id'])[0]['roles_id']
+        res = res and serv.execute_cr(cursor, user, 'res.roles', 'check',
+                user_roles, transition['role_id'])
+    res = res and eval_expr(cursor, ident, transition['condition'])
+    return res
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/workflow/wkf_service.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,108 @@
+"Workflow service"
+from trytond.netsvc import Service
+import instance
+
+
+class WorkflowService(Service):
+    "Workflow service"
+
+    def __init__(self, name='workflow'):
+        Service.__init__(self, name)
+        Service.export_method(self, self.clear_cache)
+        Service.export_method(self, self.trg_write)
+        Service.export_method(self, self.trg_delete)
+        Service.export_method(self, self.trg_create)
+        Service.export_method(self, self.trg_validate)
+        Service.export_method(self, self.trg_redirect)
+        Service.export_method(self, self.trg_trigger)
+        self.wkf_on_create_cache = {}
+
+    def clear_cache(self, cursor):
+        "Clear workflow cache"
+        self.wkf_on_create_cache[cursor.dbname] = {}
+
+    @staticmethod
+    def trg_write(user, res_type, res_id, cursor):
+        "Trigger write"
+        ident = (user, res_type, res_id)
+        cursor.execute('SELECT id FROM wkf_instance ' \
+                'WHERE res_id = %d AND res_type = %s AND state = %s',
+                (res_id, res_type, 'active'))
+        for (instance_id,) in cursor.fetchall():
+            instance.update(cursor, instance_id, ident)
+
+    @staticmethod
+    def trg_trigger(user, res_type, res_id, cursor):
+        "Trigger trigger"
+        cursor.execute('SELECT instance_id FROM wkf_triggers ' \
+                'WHERE res_id = %d AND model = %s', (res_id, res_type))
+        # TODO remove the query from for statement
+        for (instance_id,) in cursor.fetchall():
+            cursor.execute('SELECT uid, res_type, res_id FROM wkf_instance ' \
+                    'WHERE id = %d', (instance_id,))
+            ident = cursor.fetchone()
+            instance.update(cursor, instance_id, ident)
+
+    @staticmethod
+    def trg_delete(user, res_type, res_id, cursor):
+        "Trigger delete"
+        ident = (user, res_type, res_id)
+        instance.delete(cursor, ident)
+
+    def trg_create(self, user, res_type, res_id, cursor):
+        "Trigger create"
+        ident = (user, res_type, res_id)
+        self.wkf_on_create_cache.setdefault(cursor.dbname, {})
+        if res_type in self.wkf_on_create_cache[cursor.dbname]:
+            wkf_ids = self.wkf_on_create_cache[cursor.dbname][res_type]
+        else:
+            cursor.execute('SELECT id FROM wkf ' \
+                    'WHERE osv = %s AND on_create = True', (res_type,))
+            wkf_ids = cursor.fetchall()
+            self.wkf_on_create_cache[cursor.dbname][res_type] = wkf_ids
+        for (wkf_id,) in wkf_ids:
+            instance.create(cursor, ident, wkf_id)
+
+    @staticmethod
+    def trg_validate(user, res_type, res_id, signal, cursor):
+        "Trigger validate"
+        ident = (user, res_type, res_id)
+        # ids of all active workflow instances
+        # for a corresponding resource (id, model_name)
+        cursor.execute('SELECT id FROM wkf_instance ' \
+                'WHERE res_id = %d AND res_type = %s AND state = %s',
+                (res_id, res_type, 'active'))
+        for (instance_id,) in cursor.fetchall():
+            instance.validate(cursor, instance_id, ident, signal)
+
+    @staticmethod
+    def trg_redirect(user, res_type, res_id, new_rid, cursor):
+        """
+        Trigger redirect.
+        Make all workitems that are waiting on a (subflow) workflow
+        instance of the old resource point to the (first active) workflow
+        instance of the new resource.
+        """
+        # get ids of wkf instances for the old resource (res_id)
+        # XXX shouldn't we get only active instances?
+        cursor.execute('SELECT id, wkf_id FROM wkf_instance ' \
+                'WHERE res_id = %d AND res_type = %s', (res_id, res_type))
+        for old_inst_id, wkf_id in cursor.fetchall():
+            # first active instance for new resource (new_rid), using same wkf
+            cursor.execute(
+                'SELECT id '\
+                'FROM wkf_instance '\
+                'WHERE res_id = %d AND res_type = %s ' \
+                    'AND wkf_id = %d AND state = %s',
+                (new_rid, res_type, wkf_id, 'active'))
+            new_id = cursor.fetchone()
+            if new_id:
+                # select all workitems which "wait" for the old instance
+                cursor.execute('SELECT id FROM wkf_workitem ' \
+                        'WHERE subflow_id = %d', (old_inst_id,))
+                for (item_id,) in cursor.fetchall():
+                    # redirect all those workitems
+                    # to the wkf instance of the new resource
+                    cursor.execute('UPDATE wkf_workitem ' \
+                            'SET subflow_id = %d ' \
+                            'WHERE id = %d', (new_id[0], item_id))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trytond/workflow/workitem.py	Mon Dec 10 01:04:13 2007 +0100
@@ -0,0 +1,184 @@
+import wkf_expr
+import instance
+
+# TODO: remove triggers
+
+def create(cursor, act_datas, inst_id, ident):
+    for act in act_datas:
+        cursor.execute("SELECT NEXTVAL('wkf_workitem_id_seq')")
+        (id_new,) = cursor.fetchone()
+        cursor.execute("INSERT INTO wkf_workitem " \
+                "(id, act_id, inst_id, state) VALUES (%d, %s, %s, 'active')",
+                (id_new, act['id'], inst_id))
+        cursor.execute('SELECT * FROM wkf_workitem WHERE id=%d', (id_new,))
+        res = cursor.dictfetchone()
+        process(cursor, res, ident)
+
+def process(cursor, workitem, ident, signal=None, force_running=False):
+    cursor.execute('SELECT * FROM wkf_activity WHERE id = %d',
+            (workitem['act_id'],))
+    activity = cursor.dictfetchone()
+    triggers = False
+    if workitem['state'] == 'active':
+        triggers = True
+        if not _execute(cursor, workitem, activity, ident):
+            return False
+
+    if workitem['state'] == 'running':
+        pass
+
+    if workitem['state'] == 'complete' or force_running:
+        res = _split_test(cursor, workitem, activity['split_mode'], ident,
+                signal)
+        triggers = triggers and not res
+
+    if triggers:
+        cursor.execute('SELECT * FROM wkf_transition ' \
+                'WHERE act_from = %d', (workitem['act_id'],))
+        alltrans = cursor.dictfetchall()
+        for trans in alltrans:
+            if trans['trigger_model']:
+                ids = wkf_expr.eval_expr(cursor, ident,