[cig-commits] r7737 - in cs/pythia/trunk/opal: conf/project_template conf/urls contrib contrib/auth contrib/auth/handlers contrib/comments contrib/comments/views contrib/contenttypes contrib/csrf contrib/flatpages contrib/humanize/templatetags contrib/sessions contrib/sites contrib/syndication core core/cache core/cache/backends core/handlers core/serializers core/servers db db/backends db/backends/ado_mssql db/backends/dummy db/backends/mysql db/backends/oracle db/backends/postgresql db/backends/postgresql_psycopg2 db/backends/sqlite3 db/models db/models/fields dispatch forms http middleware scripts shortcuts sites template template/loaders templatetags utils utils/simplejson views views/generic

leif at geodynamics.org leif at geodynamics.org
Mon Jul 23 17:33:07 PDT 2007


Author: leif
Date: 2007-07-23 17:33:05 -0700 (Mon, 23 Jul 2007)
New Revision: 7737

Added:
   cs/pythia/trunk/opal/contrib/formtools/
   cs/pythia/trunk/opal/contrib/localflavor/
   cs/pythia/trunk/opal/contrib/sitemaps/
   cs/pythia/trunk/opal/core/serializers/pyyaml.py
   cs/pythia/trunk/opal/db/backends/mysql_old/
   cs/pythia/trunk/opal/utils/itercompat.py
   cs/pythia/trunk/opal/utils/simplejson/jsonfilter.py
Modified:
   cs/pythia/trunk/opal/conf/project_template/settings.py
   cs/pythia/trunk/opal/conf/project_template/urls.py
   cs/pythia/trunk/opal/conf/urls/defaults.py
   cs/pythia/trunk/opal/contrib/auth/__init__.py
   cs/pythia/trunk/opal/contrib/auth/create_superuser.py
   cs/pythia/trunk/opal/contrib/auth/decorators.py
   cs/pythia/trunk/opal/contrib/auth/forms.py
   cs/pythia/trunk/opal/contrib/auth/handlers/modpython.py
   cs/pythia/trunk/opal/contrib/auth/management.py
   cs/pythia/trunk/opal/contrib/auth/models.py
   cs/pythia/trunk/opal/contrib/comments/models.py
   cs/pythia/trunk/opal/contrib/comments/views/comments.py
   cs/pythia/trunk/opal/contrib/contenttypes/management.py
   cs/pythia/trunk/opal/contrib/contenttypes/models.py
   cs/pythia/trunk/opal/contrib/csrf/middleware.py
   cs/pythia/trunk/opal/contrib/flatpages/README.TXT
   cs/pythia/trunk/opal/contrib/flatpages/views.py
   cs/pythia/trunk/opal/contrib/humanize/templatetags/humanize.py
   cs/pythia/trunk/opal/contrib/sessions/middleware.py
   cs/pythia/trunk/opal/contrib/sessions/models.py
   cs/pythia/trunk/opal/contrib/sites/management.py
   cs/pythia/trunk/opal/contrib/syndication/feeds.py
   cs/pythia/trunk/opal/core/cache/__init__.py
   cs/pythia/trunk/opal/core/cache/backends/dummy.py
   cs/pythia/trunk/opal/core/cache/backends/memcached.py
   cs/pythia/trunk/opal/core/context_processors.py
   cs/pythia/trunk/opal/core/handlers/base.py
   cs/pythia/trunk/opal/core/handlers/modpython.py
   cs/pythia/trunk/opal/core/handlers/wsgi.py
   cs/pythia/trunk/opal/core/mail.py
   cs/pythia/trunk/opal/core/management.py
   cs/pythia/trunk/opal/core/paginator.py
   cs/pythia/trunk/opal/core/serializers/__init__.py
   cs/pythia/trunk/opal/core/serializers/base.py
   cs/pythia/trunk/opal/core/serializers/json.py
   cs/pythia/trunk/opal/core/serializers/python.py
   cs/pythia/trunk/opal/core/serializers/xml_serializer.py
   cs/pythia/trunk/opal/core/servers/basehttp.py
   cs/pythia/trunk/opal/core/servers/fastcgi.py
   cs/pythia/trunk/opal/core/urlresolvers.py
   cs/pythia/trunk/opal/core/validators.py
   cs/pythia/trunk/opal/core/xheaders.py
   cs/pythia/trunk/opal/db/__init__.py
   cs/pythia/trunk/opal/db/backends/ado_mssql/base.py
   cs/pythia/trunk/opal/db/backends/ado_mssql/creation.py
   cs/pythia/trunk/opal/db/backends/dummy/base.py
   cs/pythia/trunk/opal/db/backends/mysql/base.py
   cs/pythia/trunk/opal/db/backends/mysql/client.py
   cs/pythia/trunk/opal/db/backends/mysql/creation.py
   cs/pythia/trunk/opal/db/backends/mysql/introspection.py
   cs/pythia/trunk/opal/db/backends/oracle/base.py
   cs/pythia/trunk/opal/db/backends/oracle/creation.py
   cs/pythia/trunk/opal/db/backends/postgresql/base.py
   cs/pythia/trunk/opal/db/backends/postgresql/creation.py
   cs/pythia/trunk/opal/db/backends/postgresql_psycopg2/base.py
   cs/pythia/trunk/opal/db/backends/sqlite3/base.py
   cs/pythia/trunk/opal/db/backends/sqlite3/creation.py
   cs/pythia/trunk/opal/db/backends/util.py
   cs/pythia/trunk/opal/db/models/__init__.py
   cs/pythia/trunk/opal/db/models/base.py
   cs/pythia/trunk/opal/db/models/fields/__init__.py
   cs/pythia/trunk/opal/db/models/fields/generic.py
   cs/pythia/trunk/opal/db/models/fields/related.py
   cs/pythia/trunk/opal/db/models/loading.py
   cs/pythia/trunk/opal/db/models/manager.py
   cs/pythia/trunk/opal/db/models/manipulators.py
   cs/pythia/trunk/opal/db/models/options.py
   cs/pythia/trunk/opal/db/models/query.py
   cs/pythia/trunk/opal/db/models/related.py
   cs/pythia/trunk/opal/dispatch/dispatcher.py
   cs/pythia/trunk/opal/forms/__init__.py
   cs/pythia/trunk/opal/http/__init__.py
   cs/pythia/trunk/opal/middleware/cache.py
   cs/pythia/trunk/opal/middleware/common.py
   cs/pythia/trunk/opal/middleware/doc.py
   cs/pythia/trunk/opal/middleware/gzip.py
   cs/pythia/trunk/opal/middleware/http.py
   cs/pythia/trunk/opal/scripts/compile-messages.py
   cs/pythia/trunk/opal/scripts/daily_cleanup.py
   cs/pythia/trunk/opal/scripts/make-messages.py
   cs/pythia/trunk/opal/shortcuts/__init__.py
   cs/pythia/trunk/opal/sites/WebSite.py
   cs/pythia/trunk/opal/template/__init__.py
   cs/pythia/trunk/opal/template/context.py
   cs/pythia/trunk/opal/template/defaultfilters.py
   cs/pythia/trunk/opal/template/defaulttags.py
   cs/pythia/trunk/opal/template/loader.py
   cs/pythia/trunk/opal/template/loader_tags.py
   cs/pythia/trunk/opal/template/loaders/app_directories.py
   cs/pythia/trunk/opal/template/loaders/filesystem.py
   cs/pythia/trunk/opal/templatetags/__init__.py
   cs/pythia/trunk/opal/utils/datastructures.py
   cs/pythia/trunk/opal/utils/dateformat.py
   cs/pythia/trunk/opal/utils/feedgenerator.py
   cs/pythia/trunk/opal/utils/functional.py
   cs/pythia/trunk/opal/utils/simplejson/LICENSE.txt
   cs/pythia/trunk/opal/utils/simplejson/__init__.py
   cs/pythia/trunk/opal/utils/simplejson/decoder.py
   cs/pythia/trunk/opal/utils/simplejson/encoder.py
   cs/pythia/trunk/opal/utils/simplejson/scanner.py
   cs/pythia/trunk/opal/utils/text.py
   cs/pythia/trunk/opal/views/debug.py
   cs/pythia/trunk/opal/views/defaults.py
   cs/pythia/trunk/opal/views/generic/date_based.py
   cs/pythia/trunk/opal/views/generic/simple.py
   cs/pythia/trunk/opal/views/i18n.py
   cs/pythia/trunk/opal/views/static.py
Log:
Merged Django v0.96 into Opal (excluding 'newforms' and the testing
framework).


Modified: cs/pythia/trunk/opal/conf/project_template/settings.py
===================================================================
--- cs/pythia/trunk/opal/conf/project_template/settings.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/conf/project_template/settings.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -9,15 +9,18 @@
 
 MANAGERS = ADMINS
 
-DATABASE_ENGINE = ''           # 'postgresql', 'mysql', 'sqlite3' or 'ado_mssql'.
+DATABASE_ENGINE = ''           # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'ado_mssql'.
 DATABASE_NAME = ''             # Or path to database file if using sqlite3.
 DATABASE_USER = ''             # Not used with sqlite3.
 DATABASE_PASSWORD = ''         # Not used with sqlite3.
 DATABASE_HOST = ''             # Set to empty string for localhost. Not used with sqlite3.
 DATABASE_PORT = ''             # Set to empty string for default. Not used with sqlite3.
 
-# Local time zone for this installation. All choices can be found here:
-# http://www.postgresql.org/docs/current/static/datetime-keywords.html#DATETIME-TIMEZONE-SET-TABLE
+# Local time zone for this installation. Choices can be found here:
+# http://www.postgresql.org/docs/8.1/static/datetime-keywords.html#DATETIME-TIMEZONE-SET-TABLE
+# although not all variations may be possible on all operating systems.
+# If running in a Windows environment this must be set to the same as your
+# system time zone.
 TIME_ZONE = 'America/Chicago'
 
 # Language code for this installation. All choices can be found here:
@@ -27,6 +30,10 @@
 
 SITE_ID = 1
 
+# If you set this to False, Django will make some optimizations so as not
+# to load the internationalization machinery.
+USE_I18N = True
+
 # Absolute path to the directory that holds media.
 # Example: "/home/media/media.lawrence.com/"
 MEDIA_ROOT = ''
@@ -60,8 +67,9 @@
 ROOT_URLCONF = '{{ project_name }}.urls'
 
 TEMPLATE_DIRS = (
-    # Put strings here, like "/home/html/django_templates".
+    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
     # Always use forward slashes, even on Windows.
+    # Don't forget to use absolute paths, not relative paths.
 )
 
 INSTALLED_APPS = (

Modified: cs/pythia/trunk/opal/conf/project_template/urls.py
===================================================================
--- cs/pythia/trunk/opal/conf/project_template/urls.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/conf/project_template/urls.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -2,7 +2,7 @@
 
 urlpatterns = patterns('',
     # Example:
-    # (r'^{{ project_name }}/', include('{{ project_name }}.apps.foo.urls.foo')),
+    # (r'^{{ project_name }}/', include('{{ project_name }}.foo.urls')),
 
     # Uncomment this for admin:
 #     (r'^admin/', include('opal.contrib.admin.urls')),

Modified: cs/pythia/trunk/opal/conf/urls/defaults.py
===================================================================
--- cs/pythia/trunk/opal/conf/urls/defaults.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/conf/urls/defaults.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -10,8 +10,10 @@
 def patterns(prefix, *tuples):
     pattern_list = []
     for t in tuples:
-        if type(t[1]) == list:
-            pattern_list.append(RegexURLResolver(t[0], t[1][0]))
+        regex, view_or_include = t[:2]
+        default_kwargs = t[2:]
+        if type(view_or_include) == list:
+            pattern_list.append(RegexURLResolver(regex, view_or_include[0], *default_kwargs))
         else:
-            pattern_list.append(RegexURLPattern(t[0], prefix and (prefix + '.' + t[1]) or t[1], *t[2:]))
+            pattern_list.append(RegexURLPattern(regex, prefix and (prefix + '.' + view_or_include) or view_or_include, *default_kwargs))
     return pattern_list

Modified: cs/pythia/trunk/opal/contrib/auth/__init__.py
===================================================================
--- cs/pythia/trunk/opal/contrib/auth/__init__.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/contrib/auth/__init__.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -9,7 +9,7 @@
     i = path.rfind('.')
     module, attr = path[:i], path[i+1:]
     try:
-        mod = __import__(module, '', '', [attr])
+        mod = __import__(module, {}, {}, [attr])
     except ImportError, e:
         raise ImproperlyConfigured, 'Error importing authentication backend %s: "%s"' % (module, e)
     try:

Modified: cs/pythia/trunk/opal/contrib/auth/create_superuser.py
===================================================================
--- cs/pythia/trunk/opal/contrib/auth/create_superuser.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/contrib/auth/create_superuser.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -46,6 +46,7 @@
             if not username.isalnum():
                 sys.stderr.write("Error: That username is invalid. Use only letters, digits and underscores.\n")
                 username = None
+                continue
             try:
                 User.objects.get(username=username)
             except User.DoesNotExist:

Modified: cs/pythia/trunk/opal/contrib/auth/decorators.py
===================================================================
--- cs/pythia/trunk/opal/contrib/auth/decorators.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/contrib/auth/decorators.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -26,3 +26,11 @@
     to the log-in page if necessary.
     """
     )
+
+def permission_required(perm, login_url=LOGIN_URL):
+    """
+    Decorator for views that checks whether a user has a particular permission
+    enabled, redirecting to the log-in page if necessary.
+    """
+    return user_passes_test(lambda u: u.has_perm(perm), login_url=login_url)
+

Modified: cs/pythia/trunk/opal/contrib/auth/forms.py
===================================================================
--- cs/pythia/trunk/opal/contrib/auth/forms.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/contrib/auth/forms.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -4,7 +4,30 @@
 from opal.template import Context, loader
 from opal.core import validators
 from opal import forms
+from opal.utils.translation import gettext as _
 
+class UserCreationForm(forms.Manipulator):
+    "A form that creates a user, with no privileges, from the given username and password."
+    def __init__(self):
+        self.fields = (
+            forms.TextField(field_name='username', length=30, maxlength=30, is_required=True,
+                validator_list=[validators.isAlphaNumeric, self.isValidUsername]),
+            forms.PasswordField(field_name='password1', length=30, maxlength=60, is_required=True),
+            forms.PasswordField(field_name='password2', length=30, maxlength=60, is_required=True,
+                validator_list=[validators.AlwaysMatchesOtherField('password1', _("The two password fields didn't match."))]),
+        )
+
+    def isValidUsername(self, field_data, all_data):
+        try:
+            User.objects.get(username=field_data)
+        except User.DoesNotExist:
+            return
+        raise validators.ValidationError, _('A user with that username already exists.')
+
+    def save(self, new_data):
+        "Creates the user."
+        return User.objects.create_user(new_data['username'], '', new_data['password1'])
+
 class AuthenticationForm(forms.Manipulator):
     """
     Base class for authenticating users. Extend this to get a form that accepts
@@ -59,7 +82,7 @@
         try:
             self.user_cache = User.objects.get(email__iexact=new_data)
         except User.DoesNotExist:
-            raise validators.ValidationError, "That e-mail address doesn't have an associated user acount. Are you sure you've registered?"
+            raise validators.ValidationError, _("That e-mail address doesn't have an associated user account. Are you sure you've registered?")
 
     def save(self, domain_override=None, email_template_name='registration/password_reset_email.html'):
         "Calculates a new password randomly and sends it to the user"
@@ -91,16 +114,31 @@
             forms.PasswordField(field_name="old_password", length=30, maxlength=30, is_required=True,
                 validator_list=[self.isValidOldPassword]),
             forms.PasswordField(field_name="new_password1", length=30, maxlength=30, is_required=True,
-                validator_list=[validators.AlwaysMatchesOtherField('new_password2', "The two 'new password' fields didn't match.")]),
+                validator_list=[validators.AlwaysMatchesOtherField('new_password2', _("The two 'new password' fields didn't match."))]),
             forms.PasswordField(field_name="new_password2", length=30, maxlength=30, is_required=True),
         )
 
     def isValidOldPassword(self, new_data, all_data):
         "Validates that the old_password field is correct."
         if not self.user.check_password(new_data):
-            raise validators.ValidationError, "Your old password was entered incorrectly. Please enter it again."
+            raise validators.ValidationError, _("Your old password was entered incorrectly. Please enter it again.")
 
     def save(self, new_data):
         "Saves the new password."
         self.user.set_password(new_data['new_password1'])
         self.user.save()
+
+class AdminPasswordChangeForm(forms.Manipulator):
+    "A form used to change the password of a user in the admin interface."
+    def __init__(self, user):
+        self.user = user
+        self.fields = (
+            forms.PasswordField(field_name='password1', length=30, maxlength=60, is_required=True),
+            forms.PasswordField(field_name='password2', length=30, maxlength=60, is_required=True,
+                validator_list=[validators.AlwaysMatchesOtherField('password1', _("The two password fields didn't match."))]),
+        )
+
+    def save(self, new_data):
+        "Saves the new password."
+        self.user.set_password(new_data['password1'])
+        self.user.save()

Modified: cs/pythia/trunk/opal/contrib/auth/handlers/modpython.py
===================================================================
--- cs/pythia/trunk/opal/contrib/auth/handlers/modpython.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/contrib/auth/handlers/modpython.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -19,6 +19,8 @@
     superuser_only = _str_to_bool(options.get('DjangoRequireSuperuserStatus', "off"))
 
     from opal.contrib.auth.models import User
+    from opal import db
+    db.reset_queries()
 
     # check that the username is valid
     kwargs = {'username': req.user, 'is_active': True}
@@ -27,18 +29,21 @@
     if superuser_only:
         kwargs['is_superuser'] = True
     try:
-        user = User.objects.get(**kwargs)
-    except User.DoesNotExist:
-        return apache.HTTP_UNAUTHORIZED
-
-    # check the password and any permission given
-    if user.check_password(req.get_basic_auth_pw()):
-        if permission_name:
-            if user.has_perm(permission_name):
+        try:
+            user = User.objects.get(**kwargs)
+        except User.DoesNotExist:
+            return apache.HTTP_UNAUTHORIZED
+    
+        # check the password and any permission given
+        if user.check_password(req.get_basic_auth_pw()):
+            if permission_name:
+                if user.has_perm(permission_name):
+                    return apache.OK
+                else:
+                    return apache.HTTP_UNAUTHORIZED
+            else:
                 return apache.OK
-            else:
-                return apache.HTTP_UNAUTHORIZED
         else:
-            return apache.OK
-    else:
-        return apache.HTTP_UNAUTHORIZED
+            return apache.HTTP_UNAUTHORIZED
+    finally:
+        db.connection.close()

Modified: cs/pythia/trunk/opal/contrib/auth/management.py
===================================================================
--- cs/pythia/trunk/opal/contrib/auth/management.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/contrib/auth/management.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -16,7 +16,7 @@
         perms.append((_get_permission_codename(action, opts), 'Can %s %s' % (action, opts.verbose_name)))
     return perms + list(opts.permissions)
 
-def create_permissions(app, created_models):
+def create_permissions(app, created_models, verbosity):
     from opal.contrib.contenttypes.models import ContentType
     from opal.contrib.auth.models import Permission
     app_models = get_models(app)
@@ -27,13 +27,13 @@
         for codename, name in _get_all_permissions(klass._meta):
             p, created = Permission.objects.get_or_create(codename=codename, content_type__pk=ctype.id,
                 defaults={'name': name, 'content_type': ctype})
-            if created:
+            if created and verbosity >= 2:
                 print "Adding permission '%s'" % p
 
-def create_superuser(app, created_models):
+def create_superuser(app, created_models, verbosity, **kwargs):
     from opal.contrib.auth.models import User
     from opal.contrib.auth.create_superuser import createsuperuser as do_create
-    if User in created_models:
+    if User in created_models and kwargs.get('interactive', True):
         msg = "\nYou just installed Django's auth system, which means you don't have " \
                 "any superusers defined.\nWould you like to create one now? (yes/no): "
         confirm = raw_input(msg)

Modified: cs/pythia/trunk/opal/contrib/auth/models.py
===================================================================
--- cs/pythia/trunk/opal/contrib/auth/models.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/contrib/auth/models.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -33,7 +33,7 @@
 
     Permissions are set globally per type of object, not per specific object instance. It is possible to say "Mary may change news stories," but it's not currently possible to say "Mary may change news stories, but only the ones she created herself" or "Mary may only change news stories that have a certain status or publication date."
 
-    Three basic permissions -- add, create and delete -- are automatically created for each Django model.
+    Three basic permissions -- add, change and delete -- are automatically created for each Django model.
     """
     name = models.CharField(_('name'), maxlength=50)
     content_type = models.ForeignKey(ContentType)
@@ -91,10 +91,10 @@
     first_name = models.CharField(_('first name'), maxlength=30, blank=True)
     last_name = models.CharField(_('last name'), maxlength=30, blank=True)
     email = models.EmailField(_('e-mail address'), blank=True)
-    password = models.CharField(_('password'), maxlength=128, help_text=_("Use '[algo]$[salt]$[hexdigest]'"))
-    is_staff = models.BooleanField(_('staff status'), help_text=_("Designates whether the user can log into this admin site."))
+    password = models.CharField(_('password'), maxlength=128, help_text=_("Use '[algo]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>."))
+    is_staff = models.BooleanField(_('staff status'), default=False, help_text=_("Designates whether the user can log into this admin site."))
     is_active = models.BooleanField(_('active'), default=True, help_text=_("Designates whether this user can log into the Django admin. Unselect this instead of deleting accounts."))
-    is_superuser = models.BooleanField(_('superuser status'), help_text=_("Designates that this user has all permissions without explicitly assigning them."))
+    is_superuser = models.BooleanField(_('superuser status'), default=False, help_text=_("Designates that this user has all permissions without explicitly assigning them."))
     last_login = models.DateTimeField(_('last login'), default=models.LazyDate())
     date_joined = models.DateTimeField(_('date joined'), default=models.LazyDate())
     groups = models.ManyToManyField(Group, verbose_name=_('groups'), blank=True,
@@ -126,7 +126,7 @@
     def is_anonymous(self):
         "Always returns False. This is a way of comparing User objects to anonymous users."
         return False
-    
+
     def is_authenticated(self):
         """Always return True. This is a way to tell if the user has been authenticated in templates.
         """
@@ -216,6 +216,8 @@
 
     def has_module_perms(self, app_label):
         "Returns True if the user has any permissions in the given app label."
+        if not self.is_active:
+            return False
         if self.is_superuser:
             return True
         return bool(len([p for p in self.get_all_permissions() if p[:p.index('.')] == app_label]))
@@ -268,6 +270,15 @@
     def __str__(self):
         return 'AnonymousUser'
 
+    def __eq__(self, other):
+        return isinstance(other, self.__class__)
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __hash__(self):
+        return 1 # instances always return the same hash value
+
     def save(self):
         raise NotImplementedError
 
@@ -299,6 +310,6 @@
 
     def is_anonymous(self):
         return True
-    
+
     def is_authenticated(self):
         return False

Modified: cs/pythia/trunk/opal/contrib/comments/models.py
===================================================================
--- cs/pythia/trunk/opal/contrib/comments/models.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/contrib/comments/models.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -34,7 +34,7 @@
         """
         Given a rating_string, this returns a tuple of (rating_range, options).
         >>> s = "scale:1-10|First_category|Second_category"
-        >>> get_rating_options(s)
+        >>> Comment.objects.get_rating_options(s)
         ([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], ['First category', 'Second category'])
         """
         rating_range, options = rating_string.split('|', 1)

Modified: cs/pythia/trunk/opal/contrib/comments/views/comments.py
===================================================================
--- cs/pythia/trunk/opal/contrib/comments/views/comments.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/contrib/comments/views/comments.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -109,7 +109,7 @@
         # send the comment to the managers.
         if self.user_cache.comment_set.count() <= settings.COMMENTS_FIRST_FEW:
             message = ngettext('This comment was posted by a user who has posted fewer than %(count)s comment:\n\n%(text)s',
-                'This comment was posted by a user who has posted fewer than %(count)s comments:\n\n%(text)s') % \
+                'This comment was posted by a user who has posted fewer than %(count)s comments:\n\n%(text)s', settings.COMMENTS_FIRST_FEW) % \
                 {'count': settings.COMMENTS_FIRST_FEW, 'text': c.get_as_text()}
             mail_managers("Comment posted by rookie user", message)
         if settings.COMMENTS_SKETCHY_USERS_GROUP and settings.COMMENTS_SKETCHY_USERS_GROUP in [g.id for g in self.user_cache.get_group_list()]:
@@ -217,7 +217,7 @@
     errors = manipulator.get_validation_errors(new_data)
     # If user gave correct username/password and wasn't already logged in, log them in
     # so they don't have to enter a username/password again.
-    if manipulator.get_user() and new_data.has_key('password') and manipulator.get_user().check_password(new_data['password']):
+    if manipulator.get_user() and not manipulator.get_user().is_authenticated() and new_data.has_key('password') and manipulator.get_user().check_password(new_data['password']):
         from opal.contrib.auth import login
         login(request, manipulator.get_user())
     if errors or request.POST.has_key('preview'):

Modified: cs/pythia/trunk/opal/contrib/contenttypes/management.py
===================================================================
--- cs/pythia/trunk/opal/contrib/contenttypes/management.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/contrib/contenttypes/management.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -3,10 +3,11 @@
 """
 
 from opal.dispatch import dispatcher
-from opal.db.models import get_models, signals
+from opal.db.models import get_apps, get_models, signals
 
-def create_contenttypes(app, created_models):
+def create_contenttypes(app, created_models, verbosity=2):
     from opal.contrib.contenttypes.models import ContentType
+    ContentType.objects.clear_cache()
     app_models = get_models(app)
     if not app_models:
         return
@@ -19,6 +20,14 @@
             ct = ContentType(name=str(opts.verbose_name),
                 app_label=opts.app_label, model=opts.object_name.lower())
             ct.save()
-            print "Adding content type '%s | %s'" % (ct.app_label, ct.model)
+            if verbosity >= 2:
+                print "Adding content type '%s | %s'" % (ct.app_label, ct.model)
 
+def create_all_contenttypes(verbosity=2):
+    for app in get_apps():
+        create_contenttypes(app, None, verbosity)
+
 dispatcher.connect(create_contenttypes, signal=signals.post_syncdb)
+
+if __name__ == "__main__":
+    create_all_contenttypes()

Modified: cs/pythia/trunk/opal/contrib/contenttypes/models.py
===================================================================
--- cs/pythia/trunk/opal/contrib/contenttypes/models.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/contrib/contenttypes/models.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -1,6 +1,7 @@
 from opal.db import models
 from opal.utils.translation import gettext_lazy as _
 
+CONTENT_TYPE_CACHE = {}
 class ContentTypeManager(models.Manager):
     def get_for_model(self, model):
         """
@@ -8,11 +9,26 @@
         ContentType if necessary.
         """
         opts = model._meta
-        # The str() is needed around opts.verbose_name because it's a
-        # opal.utils.functional.__proxy__ object.
-        ct, created = self.model._default_manager.get_or_create(app_label=opts.app_label,
-            model=opts.object_name.lower(), defaults={'name': str(opts.verbose_name)})
+        key = (opts.app_label, opts.object_name.lower())
+        try:
+            ct = CONTENT_TYPE_CACHE[key]
+        except KeyError:
+            # The str() is needed around opts.verbose_name because it's a
+            # opal.utils.functional.__proxy__ object.
+            ct, created = self.model._default_manager.get_or_create(app_label=key[0],
+                model=key[1], defaults={'name': str(opts.verbose_name)})
+            CONTENT_TYPE_CACHE[key] = ct
         return ct
+        
+    def clear_cache(self):
+        """
+        Clear out the content-type cache. This needs to happen during database
+        flushes to prevent caching of "stale" content type IDs (see
+        opal.contrib.contenttypes.management.create_contenttypes for where
+        this gets called).
+        """
+        global CONTENT_TYPE_CACHE
+        CONTENT_TYPE_CACHE = {}
 
 class ContentType(models.Model):
     name = models.CharField(maxlength=100)

Modified: cs/pythia/trunk/opal/contrib/csrf/middleware.py
===================================================================
--- cs/pythia/trunk/opal/contrib/csrf/middleware.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/contrib/csrf/middleware.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -11,7 +11,7 @@
 import re
 import itertools
 
-_ERROR_MSG = "<h1>403 Forbidden</h1><p>Cross Site Request Forgery detected.  Request aborted.</p>"
+_ERROR_MSG = '<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en"><body><h1>403 Forbidden</h1><p>Cross Site Request Forgery detected. Request aborted.</p></body></html>'
 
 _POST_FORM_RE = \
     re.compile(r'(<form\W[^>]*\bmethod=(\'|"|)POST(\'|"|)\b[^>]*>)', re.IGNORECASE)

Modified: cs/pythia/trunk/opal/contrib/flatpages/README.TXT
===================================================================
--- cs/pythia/trunk/opal/contrib/flatpages/README.TXT	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/contrib/flatpages/README.TXT	2007-07-24 00:33:05 UTC (rev 7737)
@@ -2,7 +2,7 @@
 
 For full documentation, see either of these:
 
-    * The file django/docs/flatpages.txt in the Django distribution
+    * The file docs/flatpages.txt in the Django distribution
     * http://www.djangoproject.com/documentation/flatpages/ on the Web
 
-Both have identical content.
\ No newline at end of file
+Both have identical content.

Modified: cs/pythia/trunk/opal/contrib/flatpages/views.py
===================================================================
--- cs/pythia/trunk/opal/contrib/flatpages/views.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/contrib/flatpages/views.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -3,6 +3,7 @@
 from opal.shortcuts import get_object_or_404
 from opal.http import HttpResponse
 from opal.conf import settings
+from opal.core.xheaders import populate_xheaders
 
 DEFAULT_TEMPLATE = 'flatpages/default.html'
 
@@ -32,4 +33,6 @@
     c = RequestContext(request, {
         'flatpage': f,
     })
-    return HttpResponse(t.render(c))
+    response = HttpResponse(t.render(c))
+    populate_xheaders(request, response, FlatPage, f.id)
+    return response

Copied: cs/pythia/trunk/opal/contrib/formtools (from rev 7718, vendor/django/current/django/contrib/formtools)

Modified: cs/pythia/trunk/opal/contrib/humanize/templatetags/humanize.py
===================================================================
--- cs/pythia/trunk/opal/contrib/humanize/templatetags/humanize.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/contrib/humanize/templatetags/humanize.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -1,3 +1,5 @@
+from opal.utils.translation import ngettext
+from opal.utils.translation import gettext_lazy as _
 from opal import template
 import re
 
@@ -12,9 +14,9 @@
         value = int(value)
     except ValueError:
         return value
-    t = ('th', 'st', 'nd', 'rd', 'th', 'th', 'th', 'th', 'th', 'th')
+    t = (_('th'), _('st'), _('nd'), _('rd'), _('th'), _('th'), _('th'), _('th'), _('th'), _('th'))
     if value % 100 in (11, 12, 13): # special case
-        return '%dth' % value
+        return "%d%s" % (value, t[0])
     return '%d%s' % (value, t[value % 10])
 register.filter(ordinal)
 
@@ -41,11 +43,14 @@
     if value < 1000000:
         return value
     if value < 1000000000:
-        return '%.1f million' % (value / 1000000.0)
+        new_value = value / 1000000.0
+        return ngettext('%(value).1f million', '%(value).1f million', new_value) % {'value': new_value}
     if value < 1000000000000:
-        return '%.1f billion' % (value / 1000000000.0)
+        new_value = value / 1000000000.0
+        return ngettext('%(value).1f billion', '%(value).1f billion', new_value) % {'value': new_value}
     if value < 1000000000000000:
-        return '%.1f trillion' % (value / 1000000000000.0)
+        new_value = value / 1000000000000.0
+        return ngettext('%(value).1f trillion', '%(value).1f trillion', new_value) % {'value': new_value}
     return value
 register.filter(intword)
 
@@ -60,5 +65,5 @@
         return value
     if not 0 < value < 10:
         return value
-    return ('one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine')[value-1]
+    return (_('one'), _('two'), _('three'), _('four'), _('five'), _('six'), _('seven'), _('eight'), _('nine'))[value-1]
 register.filter(apnumber)

Copied: cs/pythia/trunk/opal/contrib/localflavor (from rev 7718, vendor/django/current/django/contrib/localflavor)

Modified: cs/pythia/trunk/opal/contrib/sessions/middleware.py
===================================================================
--- cs/pythia/trunk/opal/contrib/sessions/middleware.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/contrib/sessions/middleware.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -1,5 +1,6 @@
 from opal.conf import settings
 from opal.contrib.sessions.models import Session
+from opal.core.exceptions import SuspiciousOperation
 from opal.utils.cache import patch_vary_headers
 import datetime
 
@@ -9,6 +10,7 @@
 class SessionWrapper(object):
     def __init__(self, session_key):
         self.session_key = session_key
+        self.accessed = False
         self.modified = False
 
     def __contains__(self, key):
@@ -45,6 +47,7 @@
 
     def _get_session(self):
         # Lazily loads session from storage.
+        self.accessed = True
         try:
             return self._session_cache
         except AttributeError:
@@ -55,7 +58,7 @@
                     s = Session.objects.get(session_key=self.session_key,
                         expire_date__gt=datetime.datetime.now())
                     self._session_cache = s.get_decoded()
-                except Session.DoesNotExist:
+                except (Session.DoesNotExist, SuspiciousOperation):
                     self._session_cache = {}
                     # Set the session_key to None to force creation of a new
                     # key, for extra security.
@@ -71,14 +74,21 @@
     def process_response(self, request, response):
         # If request.session was modified, or if response.session was set, save
         # those changes and set a session cookie.
-        patch_vary_headers(response, ('Cookie',))
         try:
+            accessed = request.session.accessed
             modified = request.session.modified
         except AttributeError:
             pass
         else:
+            if accessed:
+                patch_vary_headers(response, ('Cookie',))
             if modified or settings.SESSION_SAVE_EVERY_REQUEST:
-                session_key = request.session.session_key or Session.objects.get_new_session_key()
+                if request.session.session_key:
+                    session_key = request.session.session_key
+                else:
+                    obj = Session.objects.get_new_session_object()
+                    session_key = obj.session_key
+
                 if settings.SESSION_EXPIRE_AT_BROWSER_CLOSE:
                     max_age = None
                     expires = None
@@ -88,5 +98,6 @@
                 new_session = Session.objects.save(session_key, request.session._session,
                     datetime.datetime.now() + datetime.timedelta(seconds=settings.SESSION_COOKIE_AGE))
                 response.set_cookie(settings.SESSION_COOKIE_NAME, session_key,
-                    max_age=max_age, expires=expires, domain=settings.SESSION_COOKIE_DOMAIN)
+                    max_age=max_age, expires=expires, domain=settings.SESSION_COOKIE_DOMAIN,
+                    secure=settings.SESSION_COOKIE_SECURE or None)
         return response

Modified: cs/pythia/trunk/opal/contrib/sessions/models.py
===================================================================
--- cs/pythia/trunk/opal/contrib/sessions/models.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/contrib/sessions/models.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -1,4 +1,4 @@
-import base64, md5, random, sys
+import base64, md5, random, sys, datetime
 import cPickle as pickle
 from opal.db import models
 from opal.utils.translation import gettext_lazy as _
@@ -23,6 +23,23 @@
                 break
         return session_key
 
+    def get_new_session_object(self):
+        """
+        Returns a new session object.
+        """
+        # FIXME: There is a *small* chance of collision here, meaning we will
+        # return an existing object. That can be fixed when we add a way to
+        # validate (and guarantee) that non-auto primary keys are unique. For
+        # now, we save immediately in order to reduce the "window of
+        # misfortune" as much as possible.
+        created = False
+        while not created:
+            obj, created = self.get_or_create(session_key=self.get_new_session_key(),
+                    expire_date = datetime.datetime.now())
+            # Collision in key generation, so re-seed the generator
+            random.seed()
+        return obj
+
     def save(self, session_key, session_dict, expire_date):
         s = self.model(session_key, self.encode(session_dict), expire_date)
         if session_dict:

Copied: cs/pythia/trunk/opal/contrib/sitemaps (from rev 7718, vendor/django/current/django/contrib/sitemaps)

Modified: cs/pythia/trunk/opal/contrib/sites/management.py
===================================================================
--- cs/pythia/trunk/opal/contrib/sites/management.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/contrib/sites/management.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -7,9 +7,10 @@
 from opal.contrib.sites.models import Site
 from opal.contrib.sites import models as site_app
 
-def create_default_site(app, created_models):
+def create_default_site(app, created_models, verbosity):
     if Site in created_models:
-        print "Creating example.com Site object"
+        if verbosity >= 2:
+            print "Creating example.com Site object"
         s = Site(domain="example.com", name="example.com")
         s.save()
 

Modified: cs/pythia/trunk/opal/contrib/syndication/feeds.py
===================================================================
--- cs/pythia/trunk/opal/contrib/syndication/feeds.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/contrib/syndication/feeds.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -78,6 +78,7 @@
             author_link = self.__get_dynamic_attr('author_link', obj),
             author_email = self.__get_dynamic_attr('author_email', obj),
             categories = self.__get_dynamic_attr('categories', obj),
+            feed_copyright = self.__get_dynamic_attr('feed_copyright', obj),
         )
 
         try:
@@ -116,5 +117,6 @@
                 author_email = author_email,
                 author_link = author_link,
                 categories = self.__get_dynamic_attr('item_categories', item),
+                item_copyright = self.__get_dynamic_attr('item_copyright', item),
             )
         return feed

Modified: cs/pythia/trunk/opal/core/cache/__init__.py
===================================================================
--- cs/pythia/trunk/opal/core/cache/__init__.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/core/cache/__init__.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -48,7 +48,7 @@
     if host.endswith('/'):
         host = host[:-1]
 
-    cache_class = getattr(__import__('opal.core.cache.backends.%s' % BACKENDS[scheme], '', '', ['']), 'CacheClass')
+    cache_class = getattr(__import__('opal.core.cache.backends.%s' % BACKENDS[scheme], {}, {}, ['']), 'CacheClass')
     return cache_class(host, params)
 
 cache = get_cache(settings.CACHE_BACKEND)

Modified: cs/pythia/trunk/opal/core/cache/backends/dummy.py
===================================================================
--- cs/pythia/trunk/opal/core/cache/backends/dummy.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/core/cache/backends/dummy.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -6,8 +6,8 @@
     def __init__(self, *args, **kwargs):
         pass
 
-    def get(self, *args, **kwargs):
-        pass
+    def get(self, key, default=None):
+        return default
 
     def set(self, *args, **kwargs):
         pass
@@ -16,7 +16,7 @@
         pass
 
     def get_many(self, *args, **kwargs):
-        pass
+        return {}
 
     def has_key(self, *args, **kwargs):
         return False

Modified: cs/pythia/trunk/opal/core/cache/backends/memcached.py
===================================================================
--- cs/pythia/trunk/opal/core/cache/backends/memcached.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/core/cache/backends/memcached.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -20,7 +20,7 @@
             return val
 
     def set(self, key, value, timeout=0):
-        self._cache.set(key, value, timeout)
+        self._cache.set(key, value, timeout or self.default_timeout)
 
     def delete(self, key):
         self._cache.delete(key)

Modified: cs/pythia/trunk/opal/core/context_processors.py
===================================================================
--- cs/pythia/trunk/opal/core/context_processors.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/core/context_processors.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -51,15 +51,19 @@
 class PermLookupDict(object):
     def __init__(self, user, module_name):
         self.user, self.module_name = user, module_name
+
     def __repr__(self):
-        return str(self.user.get_permission_list())
+        return str(self.user.get_all_permissions())
+
     def __getitem__(self, perm_name):
         return self.user.has_perm("%s.%s" % (self.module_name, perm_name))
+
     def __nonzero__(self):
         return self.user.has_module_perms(self.module_name)
 
 class PermWrapper(object):
     def __init__(self, user):
         self.user = user
+
     def __getitem__(self, module_name):
         return PermLookupDict(self.user, module_name)

Modified: cs/pythia/trunk/opal/core/handlers/base.py
===================================================================
--- cs/pythia/trunk/opal/core/handlers/base.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/core/handlers/base.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -26,7 +26,7 @@
                 raise exceptions.ImproperlyConfigured, '%s isn\'t a middleware module' % middleware_path
             mw_module, mw_classname = middleware_path[:dot], middleware_path[dot+1:]
             try:
-                mod = __import__(mw_module, '', '', [''])
+                mod = __import__(mw_module, {}, {}, [''])
             except ImportError, e:
                 raise exceptions.ImproperlyConfigured, 'Error importing middleware %s: "%s"' % (mw_module, e)
             try:
@@ -48,7 +48,7 @@
             if hasattr(mw_instance, 'process_exception'):
                 self._exception_middleware.insert(0, mw_instance.process_exception)
 
-    def get_response(self, path, request):
+    def get_response(self, request):
         "Returns an HttpResponse object for the given HttpRequest"
         from opal.core import exceptions
         from opal.core.mail import mail_admins
@@ -62,7 +62,7 @@
 
         try:
             site = settings
-            callback, callback_args, callback_kwargs = site.resolve(path)
+            callback, callback_args, callback_kwargs = site.resolve(request.path)
 
             # Apply view middleware
             for middleware_method in self._view_middleware:
@@ -84,12 +84,17 @@
 
             # Complain if the view returned None (a common error).
             if response is None:
-                raise ValueError, "The view %s.%s didn't return an HttpResponse object." % (callback.__module__, callback.func_name)
+                try:
+                    view_name = callback.func_name # If it's a function
+                except AttributeError:
+                    view_name = callback.__class__.__name__ + '.__call__' # If it's a class
+                raise ValueError, "The view %s.%s didn't return an HttpResponse object." % (callback.__module__, view_name)
 
             return response
         except http.Http404, e:
             if settings.DEBUG:
-                return self.get_technical_error_response(request, is404=True, exception=e)
+                from opal.views import debug
+                return debug.technical_404_response(request, e)
             else:
                 callback, param_dict = resolver.resolve404()
                 return callback(request, **param_dict)
@@ -99,40 +104,24 @@
             pass # See http://code.djangoproject.com/ticket/1023
         except: # Handle everything else, including SuspiciousOperation, etc.
             if settings.DEBUG:
-                return self.get_technical_error_response(request)
+                from opal.views import debug
+                return debug.technical_500_response(request, *sys.exc_info())
             else:
                 # Get the exception info now, in case another exception is thrown later.
                 exc_info = sys.exc_info()
                 receivers = dispatcher.send(signal=signals.got_request_exception)
                 # When DEBUG is False, send an error message to the admins.
-                subject = 'Error (%s IP): %s' % ((request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS and 'internal' or 'EXTERNAL'), getattr(request, 'path', ''))
+                subject = 'Error (%s IP): %s' % ((request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS and 'internal' or 'EXTERNAL'), request.path)
                 try:
                     request_repr = repr(request)
                 except:
                     request_repr = "Request repr() unavailable"
                 message = "%s\n\n%s" % (self._get_traceback(exc_info), request_repr)
                 mail_admins(subject, message, fail_silently=True)
-                return self.get_friendly_error_response(request, resolver)
+                # Return an HttpResponse that displays a friendly error message.
+                callback, param_dict = resolver.resolve500()
+                return callback(request, **param_dict)
 
-    def get_friendly_error_response(self, request, resolver):
-        """
-        Returns an HttpResponse that displays a PUBLIC error message for a
-        fundamental error.
-        """
-        callback, param_dict = resolver.resolve500()
-        return callback(request, **param_dict)
-
-    def get_technical_error_response(self, request, is404=False, exception=None):
-        """
-        Returns an HttpResponse that displays a TECHNICAL error message for a
-        fundamental error.
-        """
-        from opal.views import debug
-        if is404:
-            return debug.technical_404_response(request, exception)
-        else:
-            return debug.technical_500_response(request, *sys.exc_info())
-
     def _get_traceback(self, exc_info=None):
         "Helper function to return the traceback as a string"
         import traceback

Modified: cs/pythia/trunk/opal/core/handlers/modpython.py
===================================================================
--- cs/pythia/trunk/opal/core/handlers/modpython.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/core/handlers/modpython.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -16,14 +16,32 @@
         self.path = req.uri
 
     def __repr__(self):
+        # Since this is called as part of error handling, we need to be very
+        # robust against potentially malformed input.
+        try:
+            get = pformat(self.GET)
+        except:
+            get = '<could not parse>'
+        try:
+            post = pformat(self.POST)
+        except:
+            post = '<could not parse>'
+        try:
+            cookies = pformat(self.COOKIES)
+        except:
+            cookies = '<could not parse>'
+        try:
+            meta = pformat(self.META)
+        except:
+            meta = '<could not parse>'
         return '<ModPythonRequest\npath:%s,\nGET:%s,\nPOST:%s,\nCOOKIES:%s,\nMETA:%s>' % \
-            (self.path, pformat(self.GET), pformat(self.POST), pformat(self.COOKIES),
-            pformat(self.META))
+            (self.path, get, post, cookies, meta)
 
     def get_full_path(self):
         return '%s%s' % (self.path, self._req.args and ('?' + self._req.args) or '')
 
     def is_secure(self):
+        # Note: modpython 3.2.10+ has req.is_https(), but we need to support previous versions
         return self._req.subprocess_env.has_key('HTTPS') and self._req.subprocess_env['HTTPS'] == 'on'
 
     def _load_post_and_files(self):
@@ -122,10 +140,6 @@
         # that use settings now can work
         from opal.conf import settings
 
-        if settings.ENABLE_PSYCO:
-            import psyco
-            psyco.profile()
-
         # if we need to set up middleware, now that settings works we can do it now.
         if self._request_middleware is None:
             self.load_middleware()
@@ -133,7 +147,7 @@
         dispatcher.send(signal=signals.request_started)
         try:
             request = ModPythonRequest(req)
-            response = self.get_response(req.uri, request)
+            response = self.get_response(request)
 
             # Apply response middleware
             for middleware_method in self._response_middleware:
@@ -143,21 +157,21 @@
             dispatcher.send(signal=signals.request_finished)
 
         # Convert our custom HttpResponse object back into the mod_python req.
-        populate_apache_request(response, req)
+        req.content_type = response['Content-Type']
+        for key, value in response.headers.items():
+            if key != 'Content-Type':
+                req.headers_out[key] = value
+        for c in response.cookies.values():
+            req.headers_out.add('Set-Cookie', c.output(header=''))
+        req.status = response.status_code
+        try:
+            for chunk in response:
+                req.write(chunk)
+        finally:
+            response.close()
+
         return 0 # mod_python.apache.OK
 
-def populate_apache_request(http_response, mod_python_req):
-    "Populates the mod_python request object with an HttpResponse"
-    mod_python_req.content_type = http_response['Content-Type']
-    for key, value in http_response.headers.items():
-        if key != 'Content-Type':
-            mod_python_req.headers_out[key] = value
-    for c in http_response.cookies.values():
-        mod_python_req.headers_out.add('Set-Cookie', c.output(header=''))
-    mod_python_req.status = http_response.status_code
-    for chunk in http_response.iterator:
-        mod_python_req.write(chunk)
-
 def handler(req):
     # mod_python hooks into this function.
     return ModPythonHandler()(req)

Modified: cs/pythia/trunk/opal/core/handlers/wsgi.py
===================================================================
--- cs/pythia/trunk/opal/core/handlers/wsgi.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/core/handlers/wsgi.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -4,6 +4,11 @@
 from opal.utils import datastructures
 from opal import http
 from pprint import pformat
+from shutil import copyfileobj
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
 
 # See http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
 STATUS_CODE_TEXT = {
@@ -50,17 +55,49 @@
     505: 'HTTP VERSION NOT SUPPORTED',
 }
 
+def safe_copyfileobj(fsrc, fdst, length=16*1024, size=0):
+    """
+    A version of shutil.copyfileobj that will not read more than 'size' bytes.
+    This makes it safe from clients sending more than CONTENT_LENGTH bytes of
+    data in the body.
+    """
+    if not size:
+        return
+    while size > 0:
+        buf = fsrc.read(min(length, size))
+        if not buf:
+            break
+        fdst.write(buf)
+        size -= len(buf)
+
 class WSGIRequest(http.HttpRequest):
     def __init__(self, environ):
         self.environ = environ
         self.path = environ['PATH_INFO']
-        self.META = environ 
+        self.META = environ
         self.method = environ['REQUEST_METHOD'].upper()
 
     def __repr__(self):
+        # Since this is called as part of error handling, we need to be very
+        # robust against potentially malformed input.
+        try:
+            get = pformat(self.GET)
+        except:
+            get = '<could not parse>'
+        try:
+            post = pformat(self.POST)
+        except:
+            post = '<could not parse>'
+        try:
+            cookies = pformat(self.COOKIES)
+        except:
+            cookies = '<could not parse>'
+        try:
+            meta = pformat(self.META)
+        except:
+            meta = '<could not parse>'
         return '<WSGIRequest\nGET:%s,\nPOST:%s,\nCOOKIES:%s,\nMETA:%s>' % \
-            (pformat(self.GET), pformat(self.POST), pformat(self.COOKIES),
-            pformat(self.META))
+            (get, post, cookies, meta)
 
     def get_full_path(self):
         return '%s%s' % (self.path, self.environ.get('QUERY_STRING', '') and ('?' + self.environ.get('QUERY_STRING', '')) or '')
@@ -119,7 +156,15 @@
         try:
             return self._raw_post_data
         except AttributeError:
-            self._raw_post_data = self.environ['wsgi.input'].read(int(self.environ["CONTENT_LENGTH"]))
+            buf = StringIO()
+            try:
+                # CONTENT_LENGTH might be absent if POST doesn't have content at all (lighttpd)
+                content_length = int(self.environ.get('CONTENT_LENGTH', 0))
+            except ValueError: # if CONTENT_LENGTH was empty string or not an integer
+                content_length = 0
+            safe_copyfileobj(self.environ['wsgi.input'], buf, size=content_length)
+            self._raw_post_data = buf.getvalue()
+            buf.close()
             return self._raw_post_data
 
     GET = property(_get_get, _set_get)
@@ -133,10 +178,6 @@
     def __call__(self, environ, start_response):
         from opal.conf import settings
 
-        if settings.ENABLE_PSYCO:
-            import psyco
-            psyco.profile()
-
         # Set up middleware if needed. We couldn't do this earlier, because
         # settings weren't available.
         if self._request_middleware is None:
@@ -145,7 +186,7 @@
         dispatcher.send(signal=signals.request_started)
         try:
             request = WSGIRequest(environ)
-            response = self.get_response(request.path, request)
+            response = self.get_response(request)
 
             # Apply response middleware
             for middleware_method in self._response_middleware:
@@ -163,4 +204,4 @@
         for c in response.cookies.values():
             response_headers.append(('Set-Cookie', c.output(header='')))
         start_response(status, response_headers)
-        return response.iterator
+        return response

Modified: cs/pythia/trunk/opal/core/mail.py
===================================================================
--- cs/pythia/trunk/opal/core/mail.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/core/mail.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -3,8 +3,25 @@
 from opal.conf import settings
 from email.MIMEText import MIMEText
 from email.Header import Header
-import smtplib, rfc822
+from email.Utils import formatdate
+import smtplib
+import socket
+import time
+import random
 
+# Cache the hostname, but do it lazily: socket.getfqdn() can take a couple of
+# seconds, which slows down the restart of the server.
+class CachedDnsName(object):
+    def __str__(self):
+        return self.get_fqdn()
+
+    def get_fqdn(self):
+        if not hasattr(self, '_fqdn'):
+            self._fqdn = socket.getfqdn()
+        return self._fqdn
+
+DNS_NAME = CachedDnsName()
+
 class BadHeaderError(ValueError):
     pass
 
@@ -17,21 +34,34 @@
             val = Header(val, settings.DEFAULT_CHARSET)
         MIMEText.__setitem__(self, name, val)
 
-def send_mail(subject, message, from_email, recipient_list, fail_silently=False, auth_user=settings.EMAIL_HOST_USER, auth_password=settings.EMAIL_HOST_PASSWORD):
+def send_mail(subject, message, from_email, recipient_list, fail_silently=False, auth_user=None, auth_password=None):
     """
     Easy wrapper for sending a single message to a recipient list. All members
     of the recipient list will see the other recipients in the 'To' field.
+
+    If auth_user is None, the EMAIL_HOST_USER setting is used.
+    If auth_password is None, the EMAIL_HOST_PASSWORD setting is used.
     """
+    if auth_user is None:
+        auth_user = settings.EMAIL_HOST_USER
+    if auth_password is None:
+        auth_password = settings.EMAIL_HOST_PASSWORD
     return send_mass_mail([[subject, message, from_email, recipient_list]], fail_silently, auth_user, auth_password)
 
-def send_mass_mail(datatuple, fail_silently=False, auth_user=settings.EMAIL_HOST_USER, auth_password=settings.EMAIL_HOST_PASSWORD):
+def send_mass_mail(datatuple, fail_silently=False, auth_user=None, auth_password=None):
     """
     Given a datatuple of (subject, message, from_email, recipient_list), sends
     each message to each recipient list. Returns the number of e-mails sent.
 
     If from_email is None, the DEFAULT_FROM_EMAIL setting is used.
     If auth_user and auth_password are set, they're used to log in.
+    If auth_user is None, the EMAIL_HOST_USER setting is used.
+    If auth_password is None, the EMAIL_HOST_PASSWORD setting is used.
     """
+    if auth_user is None:
+        auth_user = settings.EMAIL_HOST_USER
+    if auth_password is None:
+        auth_password = settings.EMAIL_HOST_PASSWORD
     try:
         server = smtplib.SMTP(settings.EMAIL_HOST, settings.EMAIL_PORT)
         if auth_user and auth_password:
@@ -49,8 +79,13 @@
         msg['Subject'] = subject
         msg['From'] = from_email
         msg['To'] = ', '.join(recipient_list)
-        msg['Date'] = rfc822.formatdate()
+        msg['Date'] = formatdate()
         try:
+            random_bits = str(random.getrandbits(64))
+        except AttributeError: # Python 2.3 doesn't have random.getrandbits().
+            random_bits = ''.join([random.choice('1234567890') for i in range(19)])
+        msg['Message-ID'] = "<%d.%s@%s>" % (time.time(), random_bits, DNS_NAME)
+        try:
             server.sendmail(from_email, recipient_list, msg.as_string())
             num_sent += 1
         except:

Modified: cs/pythia/trunk/opal/core/management.py
===================================================================
--- cs/pythia/trunk/opal/core/management.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/core/management.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -25,13 +25,14 @@
 # which has been installed.
 PROJECT_TEMPLATE_DIR = os.path.join(opal.__path__[0], 'conf', '%s_template')
 
-INVALID_PROJECT_NAMES = ('django', 'test')
+INVALID_PROJECT_NAMES = ('django', 'site', 'test')
 
 # Set up the terminal color scheme.
 class dummy: pass
 style = dummy()
 style.ERROR = termcolors.make_style(fg='red', opts=('bold',))
 style.ERROR_OUTPUT = termcolors.make_style(fg='red', opts=('bold',))
+style.NOTICE = termcolors.make_style(fg='red')
 style.SQL_FIELD = termcolors.make_style(fg='green', opts=('bold',))
 style.SQL_COLTYPE = termcolors.make_style(fg='green')
 style.SQL_KEYWORD = termcolors.make_style(fg='yellow')
@@ -67,6 +68,25 @@
     cursor = connection.cursor()
     return get_introspection_module().get_table_list(cursor)
 
+def _get_sequence_list():
+    "Returns a list of information about all DB sequences for all models in all apps"
+    from opal.db import models
+
+    apps = models.get_apps()
+    sequence_list = []
+
+    for app in apps:
+        for model in models.get_models(app):
+            for f in model._meta.fields:
+                if isinstance(f, models.AutoField):
+                    sequence_list.append({'table':model._meta.db_table,'column':f.column,})
+                    break # Only one AutoField is allowed per model, so don't bother continuing.
+
+            for f in model._meta.many_to_many:
+                sequence_list.append({'table':f.m2m_db_table(),'column':None,})
+
+    return sequence_list
+
 # If the foreign key points to an AutoField, a PositiveIntegerField or a
 # PositiveSmallIntegerField, the foreign key should be an IntegerField, not the
 # referred field type. Otherwise, the foreign key should be the same type of
@@ -90,13 +110,15 @@
             "Edit your settings file and change DATABASE_ENGINE to something like 'postgresql' or 'mysql'.\n"))
         sys.exit(1)
 
-    # Get installed models, so we generate REFERENCES right
+    # Get installed models, so we generate REFERENCES right.
+    # We trim models from the current app so that the sqlreset command does not
+    # generate invalid SQL (leaving models out of known_models is harmless, so
+    # we can be conservative).
+    app_models = models.get_models(app)
     final_output = []
-    known_models = set(_get_installed_models(_get_table_list()))
+    known_models = set([model for model in _get_installed_models(_get_table_list()) if model not in app_models])
     pending_references = {}
 
-    app_models = models.get_models(app)
-
     for model in app_models:
         output, references = _get_sql_model_create(model, known_models)
         final_output.extend(output)
@@ -114,10 +136,13 @@
     # but don't exist physically
     not_installed_models = set(pending_references.keys())
     if not_installed_models:
-        final_output.append('-- The following references should be added but depend on non-existant tables:')
+        alter_sql = []
         for model in not_installed_models:
-            final_output.extend(['-- ' + sql for sql in
+            alter_sql.extend(['-- ' + sql for sql in
                 _get_sql_for_pending_references(model, pending_references)])
+        if alter_sql:
+            final_output.append('-- The following references should be added but depend on non-existent tables:')
+            final_output.extend(alter_sql)
 
     return final_output
 get_sql_create.help_doc = "Prints the CREATE TABLE SQL statements for the given app name(s)."
@@ -137,7 +162,7 @@
     table_output = []
     pending_references = {}
     for f in opts.fields:
-        if isinstance(f, models.ForeignKey):
+        if isinstance(f, (models.ForeignKey, models.OneToOneField)):
             rel_field = f.rel.get_related_field()
             data_type = get_rel_data_type(rel_field)
         else:
@@ -157,7 +182,8 @@
                 if f.rel.to in known_models:
                     field_output.append(style.SQL_KEYWORD('REFERENCES') + ' ' + \
                         style.SQL_TABLE(backend.quote_name(f.rel.to._meta.db_table)) + ' (' + \
-                        style.SQL_FIELD(backend.quote_name(f.rel.to._meta.get_field(f.rel.field_name).column)) + ')'
+                        style.SQL_FIELD(backend.quote_name(f.rel.to._meta.get_field(f.rel.field_name).column)) + ')' + 
+                        backend.get_deferrable_sql()
                     )
                 else:
                     # We haven't yet created the table to which this field
@@ -188,7 +214,6 @@
     data_types = get_creation_module().DATA_TYPES
 
     final_output = []
-    reference_names = {}
     if backend.supports_constraints:
         opts = model._meta
         if model in pending_references:
@@ -198,15 +223,13 @@
                 r_col = f.column
                 table = opts.db_table
                 col = opts.get_field(f.rel.field_name).column
-                r_name = '%s_referencing_%s_%s' % (r_col, table, col)
-                if r_name in reference_names:
-                    reference_names[r_name] += 1
-                    r_name += '_%s' % reference_names[r_name]
-                else:
-                    reference_names[r_name] = 0
-                final_output.append(style.SQL_KEYWORD('ALTER TABLE') + ' %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s);' % \
+                # For MySQL, r_name must be unique in the first 64 characters.
+                # So we are careful with character usage here.
+                r_name = '%s_refs_%s_%x' % (r_col, col, abs(hash((r_table, table))))
+                final_output.append(style.SQL_KEYWORD('ALTER TABLE') + ' %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' % \
                     (backend.quote_name(r_table), r_name,
-                    backend.quote_name(r_col), backend.quote_name(table), backend.quote_name(col)))
+                    backend.quote_name(r_col), backend.quote_name(table), backend.quote_name(col), 
+                    backend.get_deferrable_sql()))
             del pending_references[model]
     return final_output
 
@@ -226,18 +249,20 @@
                 (style.SQL_FIELD(backend.quote_name('id')),
                 style.SQL_COLTYPE(data_types['AutoField']),
                 style.SQL_KEYWORD('NOT NULL PRIMARY KEY')))
-            table_output.append('    %s %s %s %s (%s),' % \
+            table_output.append('    %s %s %s %s (%s)%s,' % \
                 (style.SQL_FIELD(backend.quote_name(f.m2m_column_name())),
                 style.SQL_COLTYPE(data_types[get_rel_data_type(opts.pk)] % opts.pk.__dict__),
                 style.SQL_KEYWORD('NOT NULL REFERENCES'),
                 style.SQL_TABLE(backend.quote_name(opts.db_table)),
-                style.SQL_FIELD(backend.quote_name(opts.pk.column))))
-            table_output.append('    %s %s %s %s (%s),' % \
+                style.SQL_FIELD(backend.quote_name(opts.pk.column)),
+                backend.get_deferrable_sql()))
+            table_output.append('    %s %s %s %s (%s)%s,' % \
                 (style.SQL_FIELD(backend.quote_name(f.m2m_reverse_name())),
                 style.SQL_COLTYPE(data_types[get_rel_data_type(f.rel.to._meta.pk)] % f.rel.to._meta.pk.__dict__),
                 style.SQL_KEYWORD('NOT NULL REFERENCES'),
                 style.SQL_TABLE(backend.quote_name(f.rel.to._meta.db_table)),
-                style.SQL_FIELD(backend.quote_name(f.rel.to._meta.pk.column))))
+                style.SQL_FIELD(backend.quote_name(f.rel.to._meta.pk.column)),
+                backend.get_deferrable_sql()))
             table_output.append('    %s (%s, %s)' % \
                 (style.SQL_KEYWORD('UNIQUE'),
                 style.SQL_FIELD(backend.quote_name(f.m2m_column_name())),
@@ -251,7 +276,7 @@
     from opal.db import backend, connection, models, get_introspection_module
     introspection = get_introspection_module()
 
-    # This should work even if a connecton isn't available
+    # This should work even if a connection isn't available
     try:
         cursor = connection.cursor()
     except:
@@ -295,7 +320,7 @@
                         (style.SQL_KEYWORD('ALTER TABLE'),
                         style.SQL_TABLE(backend.quote_name(table)),
                         style.SQL_KEYWORD(backend.get_drop_foreignkey_sql()),
-                        style.SQL_FIELD(backend.quote_name("%s_referencing_%s_%s" % (col, r_table, r_col)))))
+                        style.SQL_FIELD(backend.quote_name('%s_refs_%s_%x' % (col, r_col, abs(hash((table, r_table))))))))
                 del references_to_delete[model]
 
     # Output DROP TABLE statements for many-to-many tables.
@@ -324,7 +349,15 @@
 get_sql_reset.help_doc = "Prints the DROP TABLE SQL, then the CREATE TABLE SQL, for the given app name(s)."
 get_sql_reset.args = APP_ARGS
 
-def get_sql_initial_data_for_model(model):
+def get_sql_flush():
+    "Returns a list of the SQL statements used to flush the database"
+    from opal.db import backend
+    statements = backend.get_sql_flush(style, _get_table_list(), _get_sequence_list())
+    return statements
+get_sql_flush.help_doc = "Returns a list of the SQL statements required to return all tables in the database to the state they were in just after they were installed."
+get_sql_flush.args = ''
+
+def get_custom_sql_for_model(model):
     from opal.db import models
     from opal.conf import settings
 
@@ -341,16 +374,18 @@
                  os.path.join(app_dir, "%s.sql" % opts.object_name.lower())]
     for sql_file in sql_files:
         if os.path.exists(sql_file):
-            fp = open(sql_file)
+            fp = open(sql_file, 'U')
             for statement in statements.split(fp.read()):
+                # Remove any comments from the file
+                statement = re.sub(r"--.*[\n\Z]", "", statement)
                 if statement.strip():
                     output.append(statement + ";")
             fp.close()
 
     return output
 
-def get_sql_initial_data(app):
-    "Returns a list of the initial INSERT SQL statements for the given app."
+def get_custom_sql(app):
+    "Returns a list of the custom table modifying SQL statements for the given app."
     from opal.db.models import get_models
     output = []
 
@@ -358,12 +393,18 @@
     app_dir = os.path.normpath(os.path.join(os.path.dirname(app.__file__), 'sql'))
 
     for model in app_models:
-        output.extend(get_sql_initial_data_for_model(model))
+        output.extend(get_custom_sql_for_model(model))
 
     return output
-get_sql_initial_data.help_doc = "Prints the initial INSERT SQL statements for the given app name(s)."
-get_sql_initial_data.args = APP_ARGS
+get_custom_sql.help_doc = "Prints the custom table modifying SQL statements for the given app name(s)."
+get_custom_sql.args = APP_ARGS
 
+def get_sql_initial_data(apps):
+    "Deprecated: prints an error directing the user to the renamed 'sqlcustom' command."
+    return style.ERROR("This action has been renamed. Try './manage.py sqlcustom %s'." % ' '.join(apps and apps or ['app1', 'app2']))
+get_sql_initial_data.help_doc = "RENAMED: see 'sqlcustom'"
+get_sql_initial_data.args = ''
+
 def get_sql_sequence_reset(app):
     "Returns a list of the SQL statements to reset PostgreSQL sequences for the given app."
     from opal.db import backend, models
@@ -392,37 +433,54 @@
 get_sql_sequence_reset.args = APP_ARGS
 
 def get_sql_indexes(app):
-    "Returns a list of the CREATE INDEX SQL statements for the given app."
-    from opal.db import backend, models
+    "Returns a list of the CREATE INDEX SQL statements for all models in the given app."
+    from opal.db import models
     output = []
-
     for model in models.get_models(app):
-        for f in model._meta.fields:
-            if f.db_index:
-                unique = f.unique and 'UNIQUE ' or ''
-                output.append(
-                    style.SQL_KEYWORD('CREATE %sINDEX' % unique) + ' ' + \
-                    style.SQL_TABLE('%s_%s' % (model._meta.db_table, f.column)) + ' ' + \
-                    style.SQL_KEYWORD('ON') + ' ' + \
-                    style.SQL_TABLE(backend.quote_name(model._meta.db_table)) + ' ' + \
-                    "(%s);" % style.SQL_FIELD(backend.quote_name(f.column))
-                )
+        output.extend(get_sql_indexes_for_model(model))
     return output
 get_sql_indexes.help_doc = "Prints the CREATE INDEX SQL statements for the given model module name(s)."
 get_sql_indexes.args = APP_ARGS
 
+def get_sql_indexes_for_model(model):
+    "Returns the CREATE INDEX SQL statements for a single model"
+    from opal.db import backend
+    output = []
+
+    for f in model._meta.fields:
+        if f.db_index:
+            unique = f.unique and 'UNIQUE ' or ''
+            output.append(
+                style.SQL_KEYWORD('CREATE %sINDEX' % unique) + ' ' + \
+                style.SQL_TABLE('%s_%s' % (model._meta.db_table, f.column)) + ' ' + \
+                style.SQL_KEYWORD('ON') + ' ' + \
+                style.SQL_TABLE(backend.quote_name(model._meta.db_table)) + ' ' + \
+                "(%s);" % style.SQL_FIELD(backend.quote_name(f.column))
+            )
+    return output
+
 def get_sql_all(app):
     "Returns a list of CREATE TABLE SQL, initial-data inserts, and CREATE INDEX SQL for the given module."
-    return get_sql_create(app) + get_sql_initial_data(app) + get_sql_indexes(app)
+    return get_sql_create(app) + get_custom_sql(app) + get_sql_indexes(app)
 get_sql_all.help_doc = "Prints the CREATE TABLE, initial-data and CREATE INDEX SQL statements for the given model module name(s)."
 get_sql_all.args = APP_ARGS
 
-def syncdb():
+def _emit_post_sync_signal(created_models, verbosity, interactive):
+    from opal.db import models
+    from opal.dispatch import dispatcher
+    # Emit the post_sync signal for every application.
+    for app in models.get_apps():
+        app_name = app.__name__.split('.')[-2]
+        if verbosity >= 2:
+            print "Running post-sync handlers for application", app_name
+        dispatcher.send(signal=models.signals.post_syncdb, sender=app,
+            app=app, created_models=created_models,
+            verbosity=verbosity, interactive=interactive)
+
+def syncdb(verbosity=1, interactive=True):
     "Creates the database tables for all apps in INSTALLED_APPS whose tables haven't already been created."
     from opal.db import connection, transaction, models, get_creation_module
-    from opal.db.models import signals
     from opal.conf import settings
-    from opal.dispatch import dispatcher
 
     disable_termcolors()
 
@@ -433,7 +491,7 @@
     # dispatcher events.
     for app_name in settings.INSTALLED_APPS:
         try:
-            __import__(app_name + '.management', '', '', [''])
+            __import__(app_name + '.management', {}, {}, [''])
         except ImportError:
             pass
 
@@ -450,60 +508,90 @@
     created_models = set()
     pending_references = {}
 
+    # Create the tables for each model
     for app in models.get_apps():
+        app_name = app.__name__.split('.')[-2]
         model_list = models.get_models(app)
         for model in model_list:
             # Create the model's database table, if it doesn't already exist.
+            if verbosity >= 2:
+                print "Processing %s.%s model" % (app_name, model._meta.object_name)
             if model._meta.db_table in table_list:
                 continue
             sql, references = _get_sql_model_create(model, seen_models)
             seen_models.add(model)
             created_models.add(model)
             for refto, refs in references.items():
-                try:
-                    pending_references[refto].extend(refs)
-                except KeyError:
-                    pending_references[refto] = refs
+                pending_references.setdefault(refto, []).extend(refs)
             sql.extend(_get_sql_for_pending_references(model, pending_references))
-            print "Creating table %s" % model._meta.db_table
+            if verbosity >= 1:
+                print "Creating table %s" % model._meta.db_table
             for statement in sql:
                 cursor.execute(statement)
             table_list.append(model._meta.db_table)
 
+    # Create the m2m tables. This must be done after all tables have been created
+    # to ensure that all referred tables will exist.
+    for app in models.get_apps():
+        app_name = app.__name__.split('.')[-2]
+        model_list = models.get_models(app)
         for model in model_list:
             if model in created_models:
                 sql = _get_many_to_many_sql_for_model(model)
                 if sql:
-                    print "Creating many-to-many tables for %s model" % model.__name__
+                    if verbosity >= 2:
+                        print "Creating many-to-many tables for %s.%s model" % (app_name, model._meta.object_name)
                     for statement in sql:
                         cursor.execute(statement)
 
-        transaction.commit_unless_managed()
+    transaction.commit_unless_managed()
 
     # Send the post_syncdb signal, so individual apps can do whatever they need
     # to do at this point.
+    _emit_post_sync_signal(created_models, verbosity, interactive)
+
+    # Install custom SQL for the app (but only if this 
+    # is a model we've just created)
     for app in models.get_apps():
-        dispatcher.send(signal=signals.post_syncdb, sender=app,
-            app=app, created_models=created_models)
+        for model in models.get_models(app):
+            if model in created_models:
+                custom_sql = get_custom_sql_for_model(model)
+                if custom_sql:
+                    if verbosity >= 1:
+                        print "Installing custom SQL for %s.%s model" % (app_name, model._meta.object_name)
+                    try:
+                        for sql in custom_sql:
+                            cursor.execute(sql)
+                    except Exception, e:
+                        sys.stderr.write("Failed to install custom SQL for %s.%s model: %s" % \
+                                            (app_name, model._meta.object_name, e))
+                        transaction.rollback_unless_managed()
+                    else:
+                        transaction.commit_unless_managed()
 
-        # Install initial data for the app (but only if this is a model we've
-        # just created)
+    # Install SQL indices for all newly created models
+    for app in models.get_apps():
+        app_name = app.__name__.split('.')[-2]
         for model in models.get_models(app):
             if model in created_models:
-                initial_sql = get_sql_initial_data_for_model(model)
-                if initial_sql:
-                    print "Installing initial data for %s model" % model._meta.object_name
+                index_sql = get_sql_indexes_for_model(model)
+                if index_sql:
+                    if verbosity >= 1:
+                        print "Installing index for %s.%s model" % (app_name, model._meta.object_name)
                     try:
-                        for sql in initial_sql:
+                        for sql in index_sql:
                             cursor.execute(sql)
                     except Exception, e:
-                        sys.stderr.write("Failed to install initial SQL data for %s model: %s" % \
-                                            (model._meta.object_name, e))
+                        sys.stderr.write("Failed to install index for %s.%s model: %s" % \
+                                            (app_name, model._meta.object_name, e))
                         transaction.rollback_unless_managed()
                     else:
                         transaction.commit_unless_managed()
 
-syncdb.args = ''
+    # Install the 'initialdata' fixture, using format discovery
+    load_data(['initial_data'], verbosity=verbosity)
+syncdb.help_doc = "Create the database tables for all apps in INSTALLED_APPS whose tables haven't already been created."
+syncdb.args = '[--verbosity] [--interactive]'
 
 def get_admin_index(app):
     "Returns admin-index template snippet (in list form) for the given app."
@@ -533,72 +621,109 @@
     "Converts a module namespace to a Python dictionary. Used by get_settings_diff."
     return dict([(k, repr(v)) for k, v in module.__dict__.items() if not omittable(k)])
 
-def install(app):
-    "Executes the equivalent of 'get_sql_all' in the current database."
+def reset(app, interactive=True):
+    "Executes the equivalent of 'get_sql_reset' in the current database."
     from opal.db import connection, transaction
-
+    from opal.conf import settings
     app_name = app.__name__.split('.')[-2]
 
     disable_termcolors()
 
     # First, try validating the models.
     _check_for_validation_errors(app)
+    sql_list = get_sql_reset(app)
 
-    sql_list = get_sql_all(app)
+    if interactive:
+        confirm = raw_input("""
+You have requested a database reset.
+This will IRREVERSIBLY DESTROY any data for
+the "%s" application in the database "%s".
+Are you sure you want to do this?
 
-    try:
-        cursor = connection.cursor()
-        for sql in sql_list:
-            cursor.execute(sql)
-    except Exception, e:
-        sys.stderr.write(style.ERROR("""Error: %s couldn't be installed. Possible reasons:
+Type 'yes' to continue, or 'no' to cancel: """ % (app_name, settings.DATABASE_NAME))
+    else:
+        confirm = 'yes'
+
+    if confirm == 'yes':
+        try:
+            cursor = connection.cursor()
+            for sql in sql_list:
+                cursor.execute(sql)
+        except Exception, e:
+            sys.stderr.write(style.ERROR("""Error: %s couldn't be reset. Possible reasons:
   * The database isn't running or isn't configured correctly.
-  * At least one of the database tables already exists.
+  * At least one of the database tables doesn't exist.
   * The SQL was invalid.
-Hint: Look at the output of 'django-admin.py sqlall %s'. That's the SQL this command wasn't able to run.
+Hint: Look at the output of 'django-admin.py sqlreset %s'. That's the SQL this command wasn't able to run.
 The full error: """ % (app_name, app_name)) + style.ERROR_OUTPUT(str(e)) + '\n')
-        transaction.rollback_unless_managed()
-        sys.exit(1)
-    transaction.commit_unless_managed()
-install.help_doc = "Executes ``sqlall`` for the given app(s) in the current database."
-install.args = APP_ARGS
+            transaction.rollback_unless_managed()
+            sys.exit(1)
+        transaction.commit_unless_managed()
+    else:
+        print "Reset cancelled."
+reset.help_doc = "Executes ``sqlreset`` for the given app(s) in the current database."
+reset.args = '[--interactive]' + APP_ARGS
 
-def reset(app):
-    "Executes the equivalent of 'get_sql_reset' in the current database."
-    from opal.db import connection, transaction
-    app_name = app.__name__.split('.')[-2]
-
+def flush(verbosity=1, interactive=True):
+    "Returns all tables in the database to the same state they were in immediately after syncdb."
+    from opal.conf import settings
+    from opal.db import connection, transaction, models
+    from opal.dispatch import dispatcher
+    
     disable_termcolors()
 
     # First, try validating the models.
-    _check_for_validation_errors(app)
-    sql_list = get_sql_reset(app)
+    _check_for_validation_errors()
 
-    confirm = raw_input("""
-You have requested a database reset.
-This will IRREVERSIBLY DESTROY any data in your database.
+    # Import the 'management' module within each installed app, to register
+    # dispatcher events.
+    for app_name in settings.INSTALLED_APPS:
+        try:
+            __import__(app_name + '.management', {}, {}, [''])
+        except ImportError:
+            pass
+    
+    sql_list = get_sql_flush()
+
+    if interactive:
+        confirm = raw_input("""
+You have requested a flush of the database.
+This will IRREVERSIBLY DESTROY all data currently in the database,
+and return each table to the state it was in after syncdb.
 Are you sure you want to do this?
 
 Type 'yes' to continue, or 'no' to cancel: """)
+    else:
+        confirm = 'yes'
+
     if confirm == 'yes':
         try:
             cursor = connection.cursor()
             for sql in sql_list:
                 cursor.execute(sql)
         except Exception, e:
-            sys.stderr.write(style.ERROR("""Error: %s couldn't be installed. Possible reasons:
+            sys.stderr.write(style.ERROR("""Error: Database %s couldn't be flushed. Possible reasons:
   * The database isn't running or isn't configured correctly.
-  * At least one of the database tables already exists.
+  * At least one of the expected database tables doesn't exist.
   * The SQL was invalid.
-Hint: Look at the output of 'django-admin.py sqlreset %s'. That's the SQL this command wasn't able to run.
-The full error: """ % (app_name, app_name)) + style.ERROR_OUTPUT(str(e)) + '\n')
+Hint: Look at the output of 'django-admin.py sqlflush'. That's the SQL this command wasn't able to run.
+The full error: """ % settings.DATABASE_NAME + style.ERROR_OUTPUT(str(e)) + '\n'))
             transaction.rollback_unless_managed()
             sys.exit(1)
         transaction.commit_unless_managed()
+
+        # Emit the post sync signal. This allows individual
+        # applications to respond as if the database had been
+        # sync'd from scratch.
+        _emit_post_sync_signal(models.get_models(), verbosity, interactive)
+        
+        # Reinstall the initial_data fixture
+        load_data(['initial_data'], verbosity=verbosity)
+        
     else:
-        print "Reset cancelled."
-reset.help_doc = "Executes ``sqlreset`` for the given app(s) in the current database."
-reset.args = APP_ARGS
+        print "Flush cancelled."
+flush.help_doc = "Executes ``sqlflush`` on the current database."
+flush.args = '[--verbosity] [--interactive]'
 
 def _start_helper(app_or_project, name, directory, other_name=''):
     other = {'project': 'app', 'app': 'project'}[app_or_project]
@@ -629,13 +754,16 @@
             fp_new.write(fp_old.read().replace('{{ %s_name }}' % app_or_project, name).replace('{{ %s_name }}' % other, other_name))
             fp_old.close()
             fp_new.close()
-            shutil.copymode(path_old, path_new)
+            try:
+                shutil.copymode(path_old, path_new)
+            except OSError:
+                sys.stderr.write(style.NOTICE("Notice: Couldn't set permission bits on %s. You're probably using an uncommon filesystem setup. No problem.\n" % path_new))
 
 def startproject(project_name, directory):
     "Creates a Django project for the given project_name in the given directory."
     from random import choice
     if project_name in INVALID_PROJECT_NAMES:
-        sys.stderr.write(style.ERROR("Error: %r isn't a valid project name. Please try another.\n" % project_name))
+        sys.stderr.write(style.ERROR("Error: %r conflicts with the name of an existing Python module and cannot be used as a project name. Please try another name.\n" % project_name))
         sys.exit(1)
     _start_helper('project', project_name, directory)
     # Create a random SECRET_KEY hash, and put it in the main settings.
@@ -669,9 +797,7 @@
 
     introspection_module = get_introspection_module()
 
-    def table2model(table_name):
-        object_name = table_name.title().replace('_', '')
-        return object_name.endswith('s') and object_name[:-1] or object_name
+    table2model = lambda table_name: table_name.title().replace('_', '')
 
     cursor = connection.cursor()
     yield "# This is an auto-generated Django model module."
@@ -680,7 +806,7 @@
     yield "#     * Make sure each model has one field with primary_key=True"
     yield "# Feel free to rename the models, but don't rename db_table values or field names."
     yield "#"
-    yield "# Also note: You'll have to insert the output of 'django-admin.py sqlinitialdata [appname]'"
+    yield "# Also note: You'll have to insert the output of 'django-admin.py sqlcustom [appname]'"
     yield "# into your database."
     yield ''
     yield 'from opal.db import models'
@@ -700,6 +826,10 @@
             comment_notes = [] # Holds Field notes, to be displayed in a Python comment.
             extra_params = {}  # Holds Field parameters such as 'db_column'.
 
+            if ' ' in att_name:
+                extra_params['db_column'] = att_name
+                att_name = att_name.replace(' ', '')
+                comment_notes.append('Field renamed to remove spaces.')
             if keyword.iskeyword(att_name):
                 extra_params['db_column'] = att_name
                 att_name += '_field'
@@ -785,7 +915,8 @@
     validates all models of all installed apps. Writes errors, if any, to outfile.
     Returns number of errors.
     """
-    from opal.db import models
+    from opal.conf import settings
+    from opal.db import models, connection
     from opal.db.models.loading import get_app_errors
     from opal.db.models.fields.related import RelatedObject
 
@@ -827,6 +958,12 @@
             if f.db_index not in (None, True, False):
                 e.add(opts, '"%s": "db_index" should be either None, True or False.' % f.name)
 
+            # Check that maxlength <= 255 if using older MySQL versions.
+            if settings.DATABASE_ENGINE == 'mysql':
+                db_version = connection.get_server_version()
+                if db_version < (5, 0, 3) and isinstance(f, (models.CharField, models.CommaSeparatedIntegerField, models.SlugField)) and f.maxlength > 255:
+                    e.add(opts, '"%s": %s cannot have a "maxlength" greater than 255 when you are using a version of MySQL prior to 5.0.3 (you are using %s).' % (f.name, f.__class__.__name__, '.'.join([str(n) for n in db_version[:3]])))
+
             # Check to see if the related field will clash with any
             # existing fields, m2m fields, m2m related objects or related objects
             if f.rel:
@@ -868,27 +1005,32 @@
 
             rel_name = RelatedObject(f.rel.to, cls, f).get_accessor_name()
             rel_query_name = f.related_query_name()
-            for r in rel_opts.fields:
-                if r.name == rel_name:
-                    e.add(opts, "Accessor for m2m field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
-                if r.name == rel_query_name:
-                    e.add(opts, "Reverse query name for m2m field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
-            for r in rel_opts.many_to_many:
-                if r.name == rel_name:
-                    e.add(opts, "Accessor for m2m field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
-                if r.name == rel_query_name:
-                    e.add(opts, "Reverse query name for m2m field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
-            for r in rel_opts.get_all_related_many_to_many_objects():
-                if r.field is not f:
+            # If rel_name is none, there is no reverse accessor.
+            # (This only occurs for symmetrical m2m relations to self).
+            # If this is the case, there are no clashes to check for this field, as
+            # there are no reverse descriptors for this field.
+            if rel_name is not None:
+                for r in rel_opts.fields:
+                    if r.name == rel_name:
+                        e.add(opts, "Accessor for m2m field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
+                    if r.name == rel_query_name:
+                        e.add(opts, "Reverse query name for m2m field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
+                for r in rel_opts.many_to_many:
+                    if r.name == rel_name:
+                        e.add(opts, "Accessor for m2m field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
+                    if r.name == rel_query_name:
+                        e.add(opts, "Reverse query name for m2m field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
+                for r in rel_opts.get_all_related_many_to_many_objects():
+                    if r.field is not f:
+                        if r.get_accessor_name() == rel_name:
+                            e.add(opts, "Accessor for m2m field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
+                        if r.get_accessor_name() == rel_query_name:
+                            e.add(opts, "Reverse query name for m2m field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
+                for r in rel_opts.get_all_related_objects():
                     if r.get_accessor_name() == rel_name:
-                        e.add(opts, "Accessor for m2m field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
+                        e.add(opts, "Accessor for m2m field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                     if r.get_accessor_name() == rel_query_name:
-                        e.add(opts, "Reverse query name for m2m field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
-            for r in rel_opts.get_all_related_objects():
-                if r.get_accessor_name() == rel_name:
-                    e.add(opts, "Accessor for m2m field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
-                if r.get_accessor_name() == rel_query_name:
-                    e.add(opts, "Reverse query name for m2m field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
+                        e.add(opts, "Reverse query name for m2m field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
 
         # Check admin attribute.
         if opts.admin is not None:
@@ -918,7 +1060,8 @@
                         try:
                             f = opts.get_field(fn)
                         except models.FieldDoesNotExist:
-                            e.add(opts, '"admin.list_filter" refers to %r, which isn\'t a field.' % fn)
+                            if not hasattr(cls, fn):
+                                e.add(opts, '"admin.list_display_links" refers to %r, which isn\'t an attribute, method or property.' % fn)
                         if fn not in opts.admin.list_display:
                             e.add(opts, '"admin.list_display_links" refers to %r, which is not defined in "admin.list_display".' % fn)
                 # list_filter
@@ -930,6 +1073,12 @@
                             f = opts.get_field(fn)
                         except models.FieldDoesNotExist:
                             e.add(opts, '"admin.list_filter" refers to %r, which isn\'t a field.' % fn)
+                # date_hierarchy
+                if opts.admin.date_hierarchy:
+                    try:
+                        f = opts.get_field(opts.admin.date_hierarchy)
+                    except models.FieldDoesNotExist:
+                        e.add(opts, '"admin.date_hierarchy" refers to %r, which isn\'t a field.' % opts.admin.date_hierarchy)
 
         # Check ordering attribute.
         if opts.ordering:
@@ -970,10 +1119,12 @@
 
     return len(e.errors)
 
-def validate(outfile=sys.stdout):
+def validate(outfile=sys.stdout, silent_success=False):
     "Validates all installed models."
     try:
         num_errors = get_validation_errors(outfile)
+        if silent_success and num_errors == 0:
+            return
         outfile.write('%s error%s found.\n' % (num_errors, num_errors != 1 and 's' or ''))
     except ImproperlyConfigured:
         outfile.write("Skipping validation because things aren't configured properly.")
@@ -996,7 +1147,7 @@
         sys.stderr.write(s.read())
         sys.exit(1)
 
-def runserver(addr, port, use_reloader=True):
+def runserver(addr, port, use_reloader=True, admin_media_dir=''):
     "Starts a lightweight Web server for development."
     from opal.core.servers.basehttp import run, AdminMediaHandler, WSGIServerException
     from opal.core.handlers.wsgi import WSGIHandler
@@ -1014,7 +1165,10 @@
         print "Development server is running at http://%s:%s/" % (addr, port)
         print "Quit the server with %s." % quit_command
         try:
-            run(addr, int(port), AdminMediaHandler(WSGIHandler()))
+            import opal
+            path = admin_media_dir or opal.__path__[0] + '/contrib/admin/media'
+            handler = AdminMediaHandler(WSGIHandler(), path)
+            run(addr, int(port), handler)
         except WSGIServerException, e:
             # Use helpful error messages instead of ugly tracebacks.
             ERRORS = {
@@ -1035,7 +1189,7 @@
         autoreload.main(inner_run)
     else:
         inner_run()
-runserver.args = '[--noreload] [optional port number, or ipaddr:port]'
+runserver.args = '[--noreload] [--adminmedia=ADMIN_MEDIA_PATH] [optional port number, or ipaddr:port]'
 
 def createcachetable(tablename):
     "Creates the table needed to use the SQL cache backend"
@@ -1075,6 +1229,11 @@
 
 def run_shell(use_plain=False):
     "Runs a Python interactive interpreter. Tries to use IPython, if it's available."
+    # XXX: (Temporary) workaround for ticket #1796: force early loading of all
+    # models from installed apps.
+    from opal.db.models.loading import get_models
+    loaded_models = get_models()
+
     try:
         if use_plain:
             # Don't bother loading IPython, because the user wants plain Python.
@@ -1105,19 +1264,173 @@
 dbshell.args = ""
 
 def runfcgi(args):
-    """Run this project as a FastCGI application. requires flup."""
+    "Runs this project as a FastCGI application. Requires flup."
+    from opal.conf import settings
+    from opal.utils import translation
+    # Activate the current language, because it won't get activated later.
+    try:
+        translation.activate(settings.LANGUAGE_CODE)
+    except AttributeError:
+        pass
     from opal.core.servers.fastcgi import runfastcgi
     runfastcgi(args)
 runfcgi.args = '[various KEY=val options, use `runfcgi help` for help]'
 
+def test(app_labels, verbosity=1):
+    "Runs the test suite for the specified applications"
+    from opal.conf import settings
+    from opal.db.models import get_app, get_apps
+
+    if len(app_labels) == 0:
+        app_list = get_apps()
+    else:
+        app_list = [get_app(app_label) for app_label in app_labels]
+
+    test_path = settings.TEST_RUNNER.split('.')
+    # Allow for Python 2.5 relative paths
+    if len(test_path) > 1:
+        test_module_name = '.'.join(test_path[:-1])
+    else:
+        test_module_name = '.'
+    test_module = __import__(test_module_name, {}, {}, test_path[-1])
+    test_runner = getattr(test_module, test_path[-1])
+
+    failures = test_runner(app_list, verbosity)
+    if failures:
+        sys.exit(failures)
+        
+test.help_doc = 'Runs the test suite for the specified applications, or the entire site if no apps are specified'
+test.args = '[--verbosity] ' + APP_ARGS
+
+def load_data(fixture_labels, verbosity=1):
+    "Installs the provided fixture file(s) as data in the database."
+    from opal.db.models import get_apps
+    from opal.core import serializers
+    from opal.db import connection, transaction
+    from opal.conf import settings
+    import sys
+     
+    # Keep a count of the installed objects and fixtures
+    count = [0,0]
+    
+    humanize = lambda dirname: dirname and "'%s'" % dirname or 'absolute path'
+
+    # Get a cursor (even though we don't need one yet). This has
+    # the side effect of initializing the test database (if 
+    # it isn't already initialized).
+    cursor = connection.cursor()
+    
+    # Start transaction management. All fixtures are installed in a 
+    # single transaction to ensure that all references are resolved.
+    transaction.commit_unless_managed()
+    transaction.enter_transaction_management()
+    transaction.managed(True)
+    
+    app_fixtures = [os.path.join(os.path.dirname(app.__file__),'fixtures') for app in get_apps()]
+    for fixture_label in fixture_labels:
+        if verbosity > 0:
+            print "Loading '%s' fixtures..." % fixture_label
+        for fixture_dir in app_fixtures + list(settings.FIXTURE_DIRS) + ['']:
+            if verbosity > 1:
+                print "Checking %s for fixtures..." % humanize(fixture_dir)
+            parts = fixture_label.split('.')
+            if len(parts) == 1:
+                fixture_name = fixture_label
+                formats = serializers.get_serializer_formats()
+            else:
+                fixture_name, format = '.'.join(parts[:-1]), parts[-1]
+                formats = [format]
+
+            label_found = False
+            for format in formats:
+                serializer = serializers.get_serializer(format)
+                if verbosity > 1:
+                    print "Trying %s for %s fixture '%s'..." % \
+                        (humanize(fixture_dir), format, fixture_name)
+                try:
+                    full_path = os.path.join(fixture_dir, '.'.join([fixture_name, format]))
+                    fixture = open(full_path, 'r')
+                    if label_found:
+                        fixture.close()
+                        print style.ERROR("Multiple fixtures named '%s' in %s. Aborting." % 
+                            (fixture_name, humanize(fixture_dir)))
+                        transaction.rollback()
+                        transaction.leave_transaction_management()
+                        return
+                    else:
+                        count[1] += 1
+                        if verbosity > 0:
+                            print "Installing %s fixture '%s' from %s." % \
+                                (format, fixture_name, humanize(fixture_dir))
+                        try:
+                            objects =  serializers.deserialize(format, fixture)
+                            for obj in objects:
+                                count[0] += 1
+                                obj.save()
+                            label_found = True
+                        except Exception, e:
+                            fixture.close()
+                            sys.stderr.write(
+                                style.ERROR("Problem installing fixture '%s': %s\n" % 
+                                     (full_path, str(e))))
+                            transaction.rollback()
+                            transaction.leave_transaction_management()
+                            return
+                        fixture.close()
+                except:
+                    if verbosity > 1:
+                        print "No %s fixture '%s' in %s." % \
+                            (format, fixture_name, humanize(fixture_dir))
+    if count[0] == 0:
+        if verbosity > 0:
+            print "No fixtures found."
+    else:
+        if verbosity > 0:
+            print "Installed %d object(s) from %d fixture(s)" % tuple(count)
+    transaction.commit()
+    transaction.leave_transaction_management()
+        
+load_data.help_doc = 'Installs the named fixture(s) in the database'
+load_data.args = "[--verbosity] fixture, fixture, ..."
+ 
+def dump_data(app_labels, format='json', indent=None):
+    "Output the current contents of the database as a fixture of the given format"
+    from opal.db.models import get_app, get_apps, get_models
+    from opal.core import serializers
+ 
+    if len(app_labels) == 0:
+        app_list = get_apps()
+    else:
+        app_list = [get_app(app_label) for app_label in app_labels]
+ 
+    # Check that the serialization format exists; this is a shortcut to
+    # avoid collating all the objects and _then_ failing.
+    try:
+        serializers.get_serializer(format)
+    except KeyError:
+        sys.stderr.write(style.ERROR("Unknown serialization format: %s\n" % format))        
+    
+    objects = []
+    for app in app_list:
+        for model in get_models(app):
+            objects.extend(model.objects.all())
+    try:
+        return serializers.serialize(format, objects, indent=indent)
+    except Exception, e:
+        sys.stderr.write(style.ERROR("Unable to serialize database: %s\n" % e))
+dump_data.help_doc = 'Output the contents of the database as a fixture of the given format'
+dump_data.args = '[--format]' + APP_ARGS
+
 # Utilities for command-line script
 
 DEFAULT_ACTION_MAPPING = {
     'adminindex': get_admin_index,
     'createcachetable' : createcachetable,
     'dbshell': dbshell,
+    'dumpdata': dump_data,
+    'flush': flush,
     'inspectdb': inspectdb,
-    'install': install,
+    'loaddata': load_data,
     'reset': reset,
     'runfcgi': runfcgi,
     'runserver': runserver,
@@ -1125,6 +1438,8 @@
     'sql': get_sql_create,
     'sqlall': get_sql_all,
     'sqlclear': get_sql_delete,
+    'sqlcustom': get_custom_sql,
+    'sqlflush': get_sql_flush,
     'sqlindexes': get_sql_indexes,
     'sqlinitialdata': get_sql_initial_data,
     'sqlreset': get_sql_reset,
@@ -1133,6 +1448,7 @@
     'startproject': startproject,
     'syncdb': syncdb,
     'validate': validate,
+    'test':test,
 }
 
 NO_SQL_TRANSACTION = (
@@ -1180,8 +1496,19 @@
         help='Lets you manually add a directory the Python path, e.g. "/home/djangoprojects/myproject".')
     parser.add_option('--plain', action='store_true', dest='plain',
         help='Tells Django to use plain Python, not IPython, for "shell" command.')
+    parser.add_option('--noinput', action='store_false', dest='interactive', default=True,
+        help='Tells Django to NOT prompt the user for input of any kind.')
     parser.add_option('--noreload', action='store_false', dest='use_reloader', default=True,
         help='Tells Django to NOT use the auto-reloader when running the development server.')
+    parser.add_option('--format', default='json', dest='format',
+        help='Specifies the output serialization format for fixtures')    
+    parser.add_option('--indent', default=None, dest='indent',
+        type='int', help='Specifies the indent level to use when pretty-printing output')
+    parser.add_option('--verbosity', action='store', dest='verbosity', default='1',
+        type='choice', choices=['0', '1', '2'],
+        help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
+    parser.add_option('--adminmedia', dest='admin_media_path', default='', help='Specifies the directory from which to serve admin media for runserver.'),
+
     options, args = parser.parse_args(argv[1:])
 
     # Take care of options.
@@ -1206,8 +1533,10 @@
 
     if action == 'shell':
         action_mapping[action](options.plain is True)
-    elif action in ('syncdb', 'validate', 'dbshell'):
+    elif action in ('validate', 'dbshell'):
         action_mapping[action]()
+    elif action in ('flush', 'syncdb'):
+        action_mapping[action](int(options.verbosity), options.interactive)
     elif action == 'inspectdb':
         try:
             for line in action_mapping[action]():
@@ -1220,6 +1549,16 @@
             action_mapping[action](args[1])
         except IndexError:
             parser.print_usage_and_exit()
+    elif action in ('test', 'loaddata'):
+        try:
+            action_mapping[action](args[1:], int(options.verbosity))
+        except IndexError:
+            parser.print_usage_and_exit()
+    elif action == 'dumpdata':
+        try:
+            print action_mapping[action](args[1:], options.format, options.indent)
+        except IndexError:
+            parser.print_usage_and_exit()
     elif action in ('startapp', 'startproject'):
         try:
             name = args[1]
@@ -1235,11 +1574,16 @@
                 addr, port = args[1].split(':')
             except ValueError:
                 addr, port = '', args[1]
-        action_mapping[action](addr, port, options.use_reloader)
+        action_mapping[action](addr, port, options.use_reloader, options.admin_media_path)
     elif action == 'runfcgi':
         action_mapping[action](args[1:])
+    elif action == 'sqlinitialdata':
+        print action_mapping[action](args[1:])
+    elif action == 'sqlflush':
+        print '\n'.join(action_mapping[action]())
     else:
         from opal.db import models
+        validate(silent_success=True)
         try:
             mod_list = [models.get_app(app_label) for app_label in args[1:]]
         except ImportError, e:
@@ -1250,22 +1594,33 @@
         if action not in NO_SQL_TRANSACTION:
             print style.SQL_KEYWORD("BEGIN;")
         for mod in mod_list:
-            output = action_mapping[action](mod)
+            if action == 'reset':
+                output = action_mapping[action](mod, options.interactive)
+            else:
+                output = action_mapping[action](mod)
             if output:
                 print '\n'.join(output)
         if action not in NO_SQL_TRANSACTION:
             print style.SQL_KEYWORD("COMMIT;")
 
-def execute_manager(settings_mod, argv=None):
+def setup_environ(settings_mod):
+    """
+    Configure the runtime environment. This can also be used by external
+    scripts wanting to set up a similar environment to manage.py.
+    """
     # Add this project to sys.path so that it's importable in the conventional
     # way. For example, if this file (manage.py) lives in a directory
     # "myproject", this code would add "/path/to/myproject" to sys.path.
     project_directory = os.path.dirname(settings_mod.__file__)
     project_name = os.path.basename(project_directory)
     sys.path.append(os.path.join(project_directory, '..'))
-    project_module = __import__(project_name, '', '', [''])
+    project_module = __import__(project_name, {}, {}, [''])
     sys.path.pop()
 
+    return project_directory
+
+def execute_manager(settings_mod, argv=None):
+    project_directory = setup_environ(settings_mod)
     action_mapping = DEFAULT_ACTION_MAPPING.copy()
 
     # Remove the "startproject" command from the action_mapping, because that's

Modified: cs/pythia/trunk/opal/core/paginator.py
===================================================================
--- cs/pythia/trunk/opal/core/paginator.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/core/paginator.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -1,54 +1,46 @@
-from math import ceil
-
 class InvalidPage(Exception):
     pass
 
 class ObjectPaginator(object):
     """
-    This class makes pagination easy. Feed it a QuerySet, plus the number of
-    objects you want on each page. Then read the hits and pages properties to
+    This class makes pagination easy. Feed it a QuerySet or list, plus the number
+    of objects you want on each page. Then read the hits and pages properties to
     see how many pages it involves. Call get_page with a page number (starting
     at 0) to get back a list of objects for that page.
 
     Finally, check if a page number has a next/prev page using
     has_next_page(page_number) and has_previous_page(page_number).
+    
+    Use orphans to avoid small final pages. For example:
+    13 records, num_per_page=10, orphans=2 --> pages==2, len(self.get_page(0))==10
+    12 records, num_per_page=10, orphans=2 --> pages==1, len(self.get_page(0))==12
     """
-    def __init__(self, query_set, num_per_page):
+    def __init__(self, query_set, num_per_page, orphans=0):
         self.query_set = query_set
         self.num_per_page = num_per_page
-        self._hits, self._pages = None, None
-        self._has_next = {} # Caches page_number -> has_next_boolean
+        self.orphans = orphans
+        self._hits = self._pages = None
 
-    def get_page(self, page_number):
+    def validate_page_number(self, page_number):
         try:
             page_number = int(page_number)
         except ValueError:
             raise InvalidPage
-        if page_number < 0:
+        if page_number < 0 or page_number > self.pages - 1:
             raise InvalidPage
+        return page_number
 
-        # Retrieve one extra record, and check for the existence of that extra
-        # record to determine whether there's a next page.
-        limit = self.num_per_page + 1
-        offset = page_number * self.num_per_page
+    def get_page(self, page_number):
+        page_number = self.validate_page_number(page_number)
+        bottom = page_number * self.num_per_page
+        top = bottom + self.num_per_page
+        if top + self.orphans >= self.hits:
+            top = self.hits
+        return self.query_set[bottom:top]
 
-        object_list = list(self.query_set[offset:offset+limit])
-
-        if not object_list:
-            raise InvalidPage
-
-        self._has_next[page_number] = (len(object_list) > self.num_per_page)
-        return object_list[:self.num_per_page]
-
     def has_next_page(self, page_number):
         "Does page $page_number have a 'next' page?"
-        if not self._has_next.has_key(page_number):
-            if self._pages is None:
-                offset = (page_number + 1) * self.num_per_page
-                self._has_next[page_number] = len(self.query_set[offset:offset+1]) > 0
-            else:
-                self._has_next[page_number] = page_number < (self.pages - 1)
-        return self._has_next[page_number]
+        return page_number < self.pages - 1
 
     def has_previous_page(self, page_number):
         return page_number > 0
@@ -58,8 +50,7 @@
         Returns the 1-based index of the first object on the given page,
         relative to total objects found (hits).
         """
-        if page_number == 0:
-            return 1
+        page_number = self.validate_page_number(page_number)
         return (self.num_per_page * page_number) + 1
 
     def last_on_page(self, page_number):
@@ -67,20 +58,30 @@
         Returns the 1-based index of the last object on the given page,
         relative to total objects found (hits).
         """
-        if page_number == 0 and self.num_per_page >= self._hits:
-            return self._hits
-        elif page_number == (self._pages - 1) and (page_number + 1) * self.num_per_page > self._hits:
-            return self._hits
-        return (page_number + 1) * self.num_per_page
+        page_number = self.validate_page_number(page_number)
+        page_number += 1   # 1-base
+        if page_number == self.pages:
+            return self.hits
+        return page_number * self.num_per_page
 
     def _get_hits(self):
         if self._hits is None:
-            self._hits = self.query_set.count()
+            # Try .count() or fall back to len().
+            try:
+                self._hits = int(self.query_set.count())
+            except (AttributeError, TypeError, ValueError):
+                # AttributeError if query_set has no object count.
+                # TypeError if query_set.count() required arguments.
+                # ValueError if int() fails.
+                self._hits = len(self.query_set)
         return self._hits
 
     def _get_pages(self):
         if self._pages is None:
-            self._pages = int(ceil(self.hits / float(self.num_per_page)))
+            hits = (self.hits - 1 - self.orphans)
+            if hits < 1:
+                hits = 0
+            self._pages = hits // self.num_per_page + 1
         return self._pages
 
     hits = property(_get_hits)

Modified: cs/pythia/trunk/opal/core/serializers/__init__.py
===================================================================
--- cs/pythia/trunk/opal/core/serializers/__init__.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/core/serializers/__init__.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -25,11 +25,18 @@
     "json"   : "opal.core.serializers.json",
 }
 
+# Check for PyYaml and register the serializer if it's available.
+try:
+    import yaml
+    BUILTIN_SERIALIZERS["yaml"] = "opal.core.serializers.pyyaml"
+except ImportError:
+    pass    
+
 _serializers = {}
         
 def register_serializer(format, serializer_module):
     """Register a new serializer by passing in a module name."""
-    module = __import__(serializer_module, '', '', [''])
+    module = __import__(serializer_module, {}, {}, [''])
     _serializers[format] = module
     
 def unregister_serializer(format):
@@ -40,6 +47,11 @@
     if not _serializers:
         _load_serializers()
     return _serializers[format].Serializer
+
+def get_serializer_formats():
+    if not _serializers:
+        _load_serializers()
+    return _serializers.keys()
     
 def get_deserializer(format):
     if not _serializers:

Modified: cs/pythia/trunk/opal/core/serializers/base.py
===================================================================
--- cs/pythia/trunk/opal/core/serializers/base.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/core/serializers/base.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -11,7 +11,7 @@
 class SerializationError(Exception):
     """Something bad happened during serialization."""
     pass
-    
+
 class DeserializationError(Exception):
     """Something bad happened during deserialization."""
     pass
@@ -20,31 +20,35 @@
     """
     Abstract serializer base class.
     """
-    
+
     def serialize(self, queryset, **options):
         """
         Serialize a queryset.
         """
         self.options = options
-        
+
         self.stream = options.get("stream", StringIO())
-        
+        self.selected_fields = options.get("fields")
+
         self.start_serialization()
         for obj in queryset:
             self.start_object(obj)
             for field in obj._meta.fields:
-                if field is obj._meta.pk:
-                    continue
-                elif field.rel is None:
-                    self.handle_field(obj, field)
-                else:
-                    self.handle_fk_field(obj, field)
+                if field.serialize:
+                    if field.rel is None:
+                        if self.selected_fields is None or field.attname in self.selected_fields:
+                            self.handle_field(obj, field)
+                    else:
+                        if self.selected_fields is None or field.attname[:-3] in self.selected_fields:
+                            self.handle_fk_field(obj, field)
             for field in obj._meta.many_to_many:
-                self.handle_m2m_field(obj, field)
+                if field.serialize:
+                    if self.selected_fields is None or field.attname in self.selected_fields:
+                        self.handle_m2m_field(obj, field)
             self.end_object(obj)
         self.end_serialization()
         return self.getvalue()
-    
+
     def get_string_value(self, obj, field):
         """
         Convert a field's value to a string.
@@ -56,49 +60,49 @@
         else:
             value = field.flatten_data(follow=None, obj=obj).get(field.name, "")
         return str(value)
-    
+
     def start_serialization(self):
         """
         Called when serializing of the queryset starts.
         """
         raise NotImplementedError
-    
+
     def end_serialization(self):
         """
         Called when serializing of the queryset ends.
         """
         pass
-    
+
     def start_object(self, obj):
         """
         Called when serializing of an object starts.
         """
         raise NotImplementedError
-    
+
     def end_object(self, obj):
         """
         Called when serializing of an object ends.
         """
         pass
-    
+
     def handle_field(self, obj, field):
         """
         Called to handle each individual (non-relational) field on an object.
         """
         raise NotImplementedError
-    
+
     def handle_fk_field(self, obj, field):
         """
         Called to handle a ForeignKey field.
         """
         raise NotImplementedError
-    
+
     def handle_m2m_field(self, obj, field):
         """
         Called to handle a ManyToManyField.
         """
         raise NotImplementedError
-    
+
     def getvalue(self):
         """
         Return the fully serialized queryset.
@@ -109,7 +113,7 @@
     """
     Abstract base deserializer class.
     """
-    
+
     def __init__(self, stream_or_string, **options):
         """
         Init this serializer given a stream or a string
@@ -123,39 +127,39 @@
         # deserialization starts (otherwise subclass calls to get_model()
         # and friends might fail...)
         models.get_apps()
-    
+
     def __iter__(self):
         return self
-    
+
     def next(self):
+        """Iteration interface -- return the next item in the stream"""
         raise NotImplementedError
-        
+
 class DeserializedObject(object):
     """
-    A deserialzed model.
-    
+    A deserialized model.
+
     Basically a container for holding the pre-saved deserialized data along
     with the many-to-many data saved with the object.
-    
+
     Call ``save()`` to save the object (with the many-to-many data) to the
     database; call ``save(save_m2m=False)`` to save just the object fields
     (and not touch the many-to-many stuff.)
     """
-    
+
     def __init__(self, obj, m2m_data=None):
         self.object = obj
         self.m2m_data = m2m_data
-        
+
     def __repr__(self):
         return "<DeserializedObject: %s>" % str(self.object)
-        
+
     def save(self, save_m2m=True):
         self.object.save()
         if self.m2m_data and save_m2m:
             for accessor_name, object_list in self.m2m_data.items():
                 setattr(self.object, accessor_name, object_list)
-        
-        # prevent a second (possibly accidental) call to save() from saving 
+
+        # prevent a second (possibly accidental) call to save() from saving
         # the m2m data twice.
         self.m2m_data = None

Modified: cs/pythia/trunk/opal/core/serializers/json.py
===================================================================
--- cs/pythia/trunk/opal/core/serializers/json.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/core/serializers/json.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -16,7 +16,7 @@
     Convert a queryset to JSON.
     """
     def end_serialization(self):
-        simplejson.dump(self.objects, self.stream, cls=DateTimeAwareJSONEncoder)
+        simplejson.dump(self.objects, self.stream, cls=DateTimeAwareJSONEncoder, **self.options)
         
     def getvalue(self):
         return self.stream.getvalue()
@@ -41,11 +41,11 @@
     TIME_FORMAT = "%H:%M:%S"
     
     def default(self, o):
-        if isinstance(o, datetime.date):
+        if isinstance(o, datetime.datetime):
+            return o.strftime("%s %s" % (self.DATE_FORMAT, self.TIME_FORMAT))
+        elif isinstance(o, datetime.date):
             return o.strftime(self.DATE_FORMAT)
         elif isinstance(o, datetime.time):
             return o.strftime(self.TIME_FORMAT)
-        elif isinstance(o, datetime.datetime):
-            return o.strftime("%s %s" % (self.DATE_FORMAT, self.TIME_FORMAT))
         else:
-            return super(self, DateTimeAwareJSONEncoder).default(o)
+            return super(DateTimeAwareJSONEncoder, self).default(o)

Modified: cs/pythia/trunk/opal/core/serializers/python.py
===================================================================
--- cs/pythia/trunk/opal/core/serializers/python.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/core/serializers/python.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -57,7 +57,7 @@
     for d in object_list:
         # Look up the model and starting build a dict of data for it.
         Model = _get_model(d["model"])
-        data = {Model._meta.pk.name : d["pk"]}
+        data = {Model._meta.pk.attname : Model._meta.pk.to_python(d["pk"])}
         m2m_data = {}
         
         # Handle each field
@@ -67,20 +67,20 @@
                 
             field = Model._meta.get_field(field_name)
             
-            # Handle M2M relations (with in_bulk() for performance)
+            # Handle M2M relations
             if field.rel and isinstance(field.rel, models.ManyToManyRel):
                 pks = []
+                m2m_convert = field.rel.to._meta.pk.to_python
                 for pk in field_value:
                     if isinstance(pk, unicode):
-                        pk = pk.encode(options.get("encoding", settings.DEFAULT_CHARSET))
-                m2m_data[field.name] = field.rel.to._default_manager.in_bulk(field_value).values()
+                        pks.append(m2m_convert(pk.encode(options.get("encoding", settings.DEFAULT_CHARSET))))
+                    else:
+                        pks.append(m2m_convert(pk))
+                m2m_data[field.name] = pks
                 
             # Handle FK fields
             elif field.rel and isinstance(field.rel, models.ManyToOneRel):
-                try:
-                    data[field.name] = field.rel.to._default_manager.get(pk=field_value)
-                except field.rel.to.DoesNotExist:
-                    data[field.name] = None
+                data[field.attname] = field.rel.to._meta.pk.to_python(field_value)
                     
             # Handle all other fields
             else:

Copied: cs/pythia/trunk/opal/core/serializers/pyyaml.py (from rev 7719, vendor/django/current/django/core/serializers/pyyaml.py)

Modified: cs/pythia/trunk/opal/core/serializers/xml_serializer.py
===================================================================
--- cs/pythia/trunk/opal/core/serializers/xml_serializer.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/core/serializers/xml_serializer.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -13,6 +13,10 @@
     Serializes a QuerySet to XML.
     """
     
+    def indent(self, level):
+        if self.options.get('indent', None) is not None:
+            self.xml.ignorableWhitespace('\n' + ' ' * self.options.get('indent', None) * level)
+
     def start_serialization(self):
         """
         Start serialization -- open the XML document and the root element.
@@ -25,6 +29,7 @@
         """
         End serialization -- end the document.
         """
+        self.indent(0)
         self.xml.endElement("django-objects")
         self.xml.endDocument()
         
@@ -35,6 +40,7 @@
         if not hasattr(obj, "_meta"):
             raise base.SerializationError("Non-model object (%s) encountered during serialization" % type(obj))
             
+        self.indent(1)
         self.xml.startElement("object", {
             "pk"    : str(obj._get_pk_val()),
             "model" : str(obj._meta),
@@ -44,6 +50,7 @@
         """
         Called after handling all fields for an object.
         """
+        self.indent(1)
         self.xml.endElement("object")
         
     def handle_field(self, obj, field):
@@ -51,16 +58,19 @@
         Called to handle each field on an object (except for ForeignKeys and
         ManyToManyFields)
         """
+        self.indent(2)
         self.xml.startElement("field", {
             "name" : field.name,
             "type" : field.get_internal_type()
         })
         
         # Get a "string version" of the object's data (this is handled by the
-        # serializer base class).  None is handled specially.
-        value = self.get_string_value(obj, field)
-        if value is not None:
+        # serializer base class). 
+        if getattr(obj, field.name) is not None:
+            value = self.get_string_value(obj, field)
             self.xml.characters(str(value))
+        else:
+            self.xml.addQuickElement("None")
 
         self.xml.endElement("field")
         
@@ -92,6 +102,7 @@
         """
         Helper to output the <field> element for relational fields
         """
+        self.indent(2)
         self.xml.startElement("field", {
             "name" : field.name,
             "rel"  : field.rel.__class__.__name__,
@@ -127,7 +138,8 @@
         pk = node.getAttribute("pk")
         if not pk:
             raise base.DeserializationError("<object> node is missing the 'pk' attribute")
-        data = {Model._meta.pk.name : pk}
+
+        data = {Model._meta.pk.attname : Model._meta.pk.to_python(pk)}
         
         # Also start building a dict of m2m data (this is saved as
         # {m2m_accessor_attribute : [list_of_related_objects]})
@@ -148,37 +160,37 @@
             
             # As is usually the case, relation fields get the special treatment.
             if field.rel and isinstance(field.rel, models.ManyToManyRel):
-                m2m_data[field.name] = self._handle_m2m_field_node(field_node)
+                m2m_data[field.name] = self._handle_m2m_field_node(field_node, field)
             elif field.rel and isinstance(field.rel, models.ManyToOneRel):
-                data[field.name] = self._handle_fk_field_node(field_node)
+                data[field.attname] = self._handle_fk_field_node(field_node, field)
             else:
-                value = field.to_python(getInnerText(field_node).strip().encode(self.encoding))
+                if len(field_node.childNodes) == 1 and field_node.childNodes[0].nodeName == 'None':
+                    value = None
+                else:
+                    value = field.to_python(getInnerText(field_node).strip().encode(self.encoding))
                 data[field.name] = value
         
         # Return a DeserializedObject so that the m2m data has a place to live.
         return base.DeserializedObject(Model(**data), m2m_data)
         
-    def _handle_fk_field_node(self, node):
+    def _handle_fk_field_node(self, node, field):
         """
         Handle a <field> node for a ForeignKey
         """
-        # Try to set the foreign key by looking up the foreign related object.
-        # If it doesn't exist, set the field to None (which might trigger 
-        # validation error, but that's expected).
-        RelatedModel = self._get_model_from_node(node, "to")
-        return RelatedModel.objects.get(pk=getInnerText(node).strip().encode(self.encoding))
+        # Check if there is a child node named 'None', returning None if so.
+        if len(node.childNodes) == 1 and node.childNodes[0].nodeName == 'None':
+            return None
+        else:
+            return field.rel.to._meta.pk.to_python(
+                       getInnerText(node).strip().encode(self.encoding))
         
-    def _handle_m2m_field_node(self, node):
+    def _handle_m2m_field_node(self, node, field):
         """
         Handle a <field> node for a ManyToManyField
         """
-        # Load the related model
-        RelatedModel = self._get_model_from_node(node, "to")
-        
-        # Look up all the related objects. Using the in_bulk() lookup ensures
-        # that missing related objects don't cause an exception
-        related_ids = [c.getAttribute("pk").encode(self.encoding) for c in node.getElementsByTagName("object")]
-        return RelatedModel._default_manager.in_bulk(related_ids).values()
+        return [field.rel.to._meta.pk.to_python(
+                    c.getAttribute("pk").encode(self.encoding)) 
+                    for c in node.getElementsByTagName("object")]
     
     def _get_model_from_node(self, node, attr):
         """

Modified: cs/pythia/trunk/opal/core/servers/basehttp.py
===================================================================
--- cs/pythia/trunk/opal/core/servers/basehttp.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/core/servers/basehttp.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -547,10 +547,6 @@
 
         env['PATH_INFO'] = urllib.unquote(path)
         env['QUERY_STRING'] = query
-
-        host = self.address_string()
-        if host != self.client_address[0]:
-            env['REMOTE_HOST'] = host
         env['REMOTE_ADDR'] = self.client_address[0]
 
         if self.headers.typeheader is None:
@@ -598,11 +594,14 @@
     Use this ONLY LOCALLY, for development! This hasn't been tested for
     security and is not super efficient.
     """
-    def __init__(self, application):
+    def __init__(self, application, media_dir=None):
         from opal.conf import settings
-        import opal
         self.application = application
-        self.media_dir = opal.__path__[0] + '/contrib/admin/media'
+        if not media_dir:
+            import opal
+            self.media_dir = opal.__path__[0] + '/contrib/admin/media'
+        else:
+            self.media_dir = media_dir
         self.media_url = settings.ADMIN_MEDIA_PREFIX
 
     def __call__(self, environ, start_response):

Modified: cs/pythia/trunk/opal/core/servers/fastcgi.py
===================================================================
--- cs/pythia/trunk/opal/core/servers/fastcgi.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/core/servers/fastcgi.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -31,9 +31,11 @@
   port=PORTNUM         port to listen on.
   socket=FILE          UNIX socket to listen on.
   method=IMPL          prefork or threaded (default prefork)
-  maxspare=NUMBER      max number of spare processes to keep running.
-  minspare=NUMBER      min number of spare processes to prefork.
-  maxchildren=NUMBER   hard limit number of processes in prefork mode.
+  maxrequests=NUMBER   number of requests a child handles before it is 
+                       killed and a new child is forked (0 = no limit).
+  maxspare=NUMBER      max number of spare processes / threads
+  minspare=NUMBER      min number of spare processes / threads.
+  maxchildren=NUMBER   hard limit number of processes / threads
   daemonize=BOOL       whether to detach from terminal.
   pidfile=FILE         write the spawned process-id to this file.
   workdir=DIRECTORY    change to this directory when daemonizing
@@ -66,6 +68,7 @@
     'maxspare': 5,
     'minspare': 2,
     'maxchildren': 50,
+    'maxrequests': 0,
 }
 
 def fastcgi_help(message=None):
@@ -74,8 +77,9 @@
         print message
     return False
 
-def runfastcgi(argset):
+def runfastcgi(argset=[], **kwargs):
     options = FASTCGI_OPTIONS.copy()
+    options.update(kwargs)
     for x in argset:
         if "=" in x:
             k, v = x.split('=', 1)
@@ -102,15 +106,20 @@
             'maxSpare': int(options["maxspare"]),
             'minSpare': int(options["minspare"]),
             'maxChildren': int(options["maxchildren"]),
+            'maxRequests': int(options["maxrequests"]), 
         }
     elif options['method'] in ('thread', 'threaded'):
         from flup.server.fcgi import WSGIServer
-        wsgi_opts = {}
+        wsgi_opts = {
+            'maxSpare': int(options["maxspare"]),
+            'minSpare': int(options["minspare"]),
+            'maxThreads': int(options["maxchildren"]),
+        }
     else:
         return fastcgi_help("ERROR: Implementation must be one of prefork or thread.")
-    
+
     wsgi_opts['debug'] = False # Turn off flup tracebacks
-    
+
     # Prep up and go
     from opal.core.handlers.wsgi import WSGIHandler
 

Modified: cs/pythia/trunk/opal/core/urlresolvers.py
===================================================================
--- cs/pythia/trunk/opal/core/urlresolvers.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/core/urlresolvers.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -15,12 +15,16 @@
     pass
 
 class NoReverseMatch(Exception):
-    pass
+    # Don't make this raise an error when used in a template.
+    silent_variable_failure = True
 
 def get_mod_func(callback):
     # Converts 'opal.views.news.stories.story_detail' to
     # ['opal.views.news.stories', 'story_detail']
-    dot = callback.rindex('.')
+    try:
+        dot = callback.rindex('.')
+    except ValueError:
+        return callback, ''
     return callback[:dot], callback[dot+1:]
 
 def reverse_helper(regex, *args, **kwargs):
@@ -86,10 +90,15 @@
 class RegexURLPattern(object):
     def __init__(self, regex, callback, default_args=None):
         # regex is a string representing a regular expression.
-        # callback is something like 'foo.views.news.stories.story_detail',
-        # which represents the path to a module and a view function name.
+        # callback is either a string like 'foo.views.news.stories.story_detail'
+        # which represents the path to a module and a view function name, or a
+        # callable object (view).
         self.regex = re.compile(regex)
-        self.callback = callback
+        if callable(callback):
+            self._callback = callback
+        else:
+            self._callback = None
+            self._callback_str = callback
         self.default_args = default_args or {}
 
     def resolve(self, path):
@@ -101,41 +110,47 @@
             kwargs = match.groupdict()
             if kwargs:
                 args = ()
-            if not kwargs:
+            else:
                 args = match.groups()
             # In both cases, pass any extra_kwargs as **kwargs.
             kwargs.update(self.default_args)
 
-            try: # Lazily load self.func.
-                return self.func, args, kwargs
-            except AttributeError:
-                self.func = self.get_callback()
-            return self.func, args, kwargs
+            return self.callback, args, kwargs
 
-    def get_callback(self):
-        mod_name, func_name = get_mod_func(self.callback)
+    def _get_callback(self):
+        if self._callback is not None:
+            return self._callback
+        mod_name, func_name = get_mod_func(self._callback_str)
         try:
-            return getattr(__import__(mod_name, '', '', ['']), func_name)
+            self._callback = getattr(__import__(mod_name, {}, {}, ['']), func_name)
         except ImportError, e:
             raise ViewDoesNotExist, "Could not import %s. Error was: %s" % (mod_name, str(e))
         except AttributeError, e:
             raise ViewDoesNotExist, "Tried %s in module %s. Error was: %s" % (func_name, mod_name, str(e))
+        return self._callback
+    callback = property(_get_callback)
 
     def reverse(self, viewname, *args, **kwargs):
-        if viewname != self.callback:
+        mod_name, func_name = get_mod_func(viewname)
+        try:
+            lookup_view = getattr(__import__(mod_name, {}, {}, ['']), func_name)
+        except (ImportError, AttributeError):
             raise NoReverseMatch
+        if lookup_view != self.callback:
+            raise NoReverseMatch
         return self.reverse_helper(*args, **kwargs)
 
     def reverse_helper(self, *args, **kwargs):
         return reverse_helper(self.regex, *args, **kwargs)
 
 class RegexURLResolver(object):
-    def __init__(self, regex, urlconf_name):
+    def __init__(self, regex, urlconf_name, default_kwargs=None):
         # regex is a string representing a regular expression.
         # urlconf_name is a string representing the module containing urlconfs.
         self.regex = re.compile(regex)
         self.urlconf_name = urlconf_name
         self.callback = None
+        self.default_kwargs = default_kwargs or {}
 
     def resolve(self, path):
         tried = []
@@ -149,7 +164,8 @@
                     tried.extend([(pattern.regex.pattern + '   ' + t) for t in e.args[0]['tried']])
                 else:
                     if sub_match:
-                        return sub_match[0], sub_match[1], dict(match.groupdict(), **sub_match[2])
+                        sub_match_dict = dict(self.default_kwargs, **sub_match[2])
+                        return sub_match[0], sub_match[1], dict(match.groupdict(), **sub_match_dict)
                     tried.append(pattern.regex.pattern)
             raise Resolver404, {'tried': tried, 'path': new_path}
 
@@ -158,7 +174,7 @@
             return self._urlconf_module
         except AttributeError:
             try:
-                self._urlconf_module = __import__(self.urlconf_name, '', '', [''])
+                self._urlconf_module = __import__(self.urlconf_name, {}, {}, [''])
             except ValueError, e:
                 # Invalid urlconf_name, such as "foo.bar." (note trailing period)
                 raise ImproperlyConfigured, "Error while importing URLconf %r: %s" % (self.urlconf_name, e)
@@ -173,7 +189,7 @@
         callback = getattr(self.urlconf_module, 'handler%s' % view_type)
         mod_name, func_name = get_mod_func(callback)
         try:
-            return getattr(__import__(mod_name, '', '', ['']), func_name), {}
+            return getattr(__import__(mod_name, {}, {}, ['']), func_name), {}
         except (ImportError, AttributeError), e:
             raise ViewDoesNotExist, "Tried %s. Error was: %s" % (callback, str(e))
 
@@ -183,22 +199,28 @@
     def resolve500(self):
         return self._resolve_special('500')
 
-    def reverse(self, viewname, *args, **kwargs):
+    def reverse(self, lookup_view, *args, **kwargs):
+        if not callable(lookup_view):
+            mod_name, func_name = get_mod_func(lookup_view)
+            try:
+                lookup_view = getattr(__import__(mod_name, {}, {}, ['']), func_name)
+            except (ImportError, AttributeError):
+                raise NoReverseMatch
         for pattern in self.urlconf_module.urlpatterns:
             if isinstance(pattern, RegexURLResolver):
                 try:
-                    return pattern.reverse_helper(viewname, *args, **kwargs)
+                    return pattern.reverse_helper(lookup_view, *args, **kwargs)
                 except NoReverseMatch:
                     continue
-            elif pattern.callback == viewname:
+            elif pattern.callback == lookup_view:
                 try:
                     return pattern.reverse_helper(*args, **kwargs)
                 except NoReverseMatch:
                     continue
         raise NoReverseMatch
 
-    def reverse_helper(self, viewname, *args, **kwargs):
-        sub_match = self.reverse(viewname, *args, **kwargs)
+    def reverse_helper(self, lookup_view, *args, **kwargs):
+        sub_match = self.reverse(lookup_view, *args, **kwargs)
         result = reverse_helper(self.regex, *args, **kwargs)
         return result + sub_match
 

Modified: cs/pythia/trunk/opal/core/validators.py
===================================================================
--- cs/pythia/trunk/opal/core/validators.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/core/validators.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -8,12 +8,13 @@
 form field is required.
 """
 
+import urllib2
 from opal.conf import settings
 from opal.utils.translation import gettext, gettext_lazy, ngettext
 from opal.utils.functional import Promise, lazy
 import re
 
-_datere = r'(19|2\d)\d{2}-((?:0?[1-9])|(?:1[0-2]))-((?:0?[1-9])|(?:[12][0-9])|(?:3[0-1]))'
+_datere = r'\d{4}-\d{1,2}-\d{1,2}'
 _timere = r'(?:[01]?[0-9]|2[0-3]):[0-5][0-9](?::[0-5][0-9])?'
 alnum_re = re.compile(r'^\w+$')
 alnumurl_re = re.compile(r'^[-\w/]+$')
@@ -68,7 +69,7 @@
 
 def isSlug(field_data, all_data):
     if not slug_re.search(field_data):
-        raise ValidationError, "This value must contain only letters, numbers, underscores or hyphens."
+        raise ValidationError, gettext("This value must contain only letters, numbers, underscores or hyphens.")
 
 def isLowerCase(field_data, all_data):
     if field_data.lower() != field_data:
@@ -122,9 +123,30 @@
     if not field_data.isalpha():
         raise ValidationError, gettext("Only alphabetical characters are allowed here.")
 
+def _isValidDate(date_string):
+    """
+    A helper function used by isValidANSIDate and isValidANSIDatetime to
+    check if the date is valid.  The date string is assumed to already be in
+    YYYY-MM-DD format.
+    """
+    from datetime import date
+    # Could use time.strptime here and catch errors, but datetime.date below
+    # produces much friendlier error messages.
+    year, month, day = map(int, date_string.split('-'))
+    # This check is needed because strftime is used when saving the date
+    # value to the database, and strftime requires that the year be >=1900.
+    if year < 1900:
+        raise ValidationError, gettext('Year must be 1900 or later.')
+    try:
+        date(year, month, day)
+    except ValueError, e:
+        msg = gettext('Invalid date: %s') % gettext(str(e))
+        raise ValidationError, msg    
+
 def isValidANSIDate(field_data, all_data):
     if not ansi_date_re.search(field_data):
         raise ValidationError, gettext('Enter a valid date in YYYY-MM-DD format.')
+    _isValidDate(field_data)
 
 def isValidANSITime(field_data, all_data):
     if not ansi_time_re.search(field_data):
@@ -133,6 +155,7 @@
 def isValidANSIDatetime(field_data, all_data):
     if not ansi_datetime_re.search(field_data):
         raise ValidationError, gettext('Enter a valid date/time in YYYY-MM-DD HH:MM format.')
+    _isValidDate(field_data.split()[0])
 
 def isValidEmail(field_data, all_data):
     if not email_re.search(field_data):
@@ -202,18 +225,26 @@
     isWellFormedXml('<root>%s</root>' % field_data, all_data)
 
 def isExistingURL(field_data, all_data):
-    import urllib2
     try:
-        u = urllib2.urlopen(field_data)
+        headers = {
+            "Accept" : "text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5",
+            "Accept-Language" : "en-us,en;q=0.5",
+            "Accept-Charset": "ISO-8859-1,utf-8;q=0.7,*;q=0.7",
+            "Connection" : "close",
+            "User-Agent": settings.URL_VALIDATOR_USER_AGENT
+            }
+        req = urllib2.Request(field_data,None, headers)
+        u = urllib2.urlopen(req)
     except ValueError:
-        raise ValidationError, gettext("Invalid URL: %s") % field_data
+        raise ValidationError, _("Invalid URL: %s") % field_data
     except urllib2.HTTPError, e:
         # 401s are valid; they just mean authorization is required.
-        if e.code not in ('401',):
-            raise ValidationError, gettext("The URL %s is a broken link.") % field_data
+        # 301 and 302 are redirects; they just mean look somewhere else.
+        if str(e.code) not in ('401','301','302'):
+            raise ValidationError, _("The URL %s is a broken link.") % field_data
     except: # urllib2.URLError, httplib.InvalidURL, etc.
-        raise ValidationError, gettext("The URL %s is a broken link.") % field_data
-
+        raise ValidationError, _("The URL %s is a broken link.") % field_data
+        
 def isValidUSState(field_data, all_data):
     "Checks that the given string is a valid two-letter U.S. state abbreviation"
     states = ['AA', 'AE', 'AK', 'AL', 'AP', 'AR', 'AS', 'AZ', 'CA', 'CO', 'CT', 'DC', 'DE', 'FL', 'FM', 'GA', 'GU', 'HI', 'IA', 'ID', 'IL', 'IN', 'KS', 'KY', 'LA', 'MA', 'MD', 'ME', 'MH', 'MI', 'MN', 'MO', 'MP', 'MS', 'MT', 'NC', 'ND', 'NE', 'NH', 'NJ', 'NM', 'NV', 'NY', 'OH', 'OK', 'OR', 'PA', 'PR', 'PW', 'RI', 'SC', 'SD', 'TN', 'TX', 'UT', 'VA', 'VI', 'VT', 'WA', 'WI', 'WV', 'WY']
@@ -227,9 +258,8 @@
     catch 'motherfucker' as well. Raises a ValidationError such as:
         Watch your mouth! The words "f--k" and "s--t" are not allowed here.
     """
-    bad_words = ['asshat', 'asshead', 'asshole', 'cunt', 'fuck', 'gook', 'nigger', 'shit'] # all in lower case
     field_data = field_data.lower() # normalize
-    words_seen = [w for w in bad_words if field_data.find(w) > -1]
+    words_seen = [w for w in settings.PROFANITIES_LIST if w in field_data]
     if words_seen:
         from opal.utils.text import get_text_list
         plural = len(words_seen) > 1
@@ -283,11 +313,12 @@
         RequiredIfOtherFieldsGiven.__init__(self, [other_field_name], error_message)
 
 class RequiredIfOtherFieldEquals(object):
-    def __init__(self, other_field, other_value, error_message=None):
+    def __init__(self, other_field, other_value, error_message=None, other_label=None):
         self.other_field = other_field
         self.other_value = other_value
+        other_label = other_label or other_value
         self.error_message = error_message or lazy_inter(gettext_lazy("This field must be given if %(field)s is %(value)s"), {
-            'field': other_field, 'value': other_value})
+            'field': other_field, 'value': other_label})
         self.always_test = True
 
     def __call__(self, field_data, all_data):
@@ -295,11 +326,12 @@
             raise ValidationError(self.error_message)
 
 class RequiredIfOtherFieldDoesNotEqual(object):
-    def __init__(self, other_field, other_value, error_message=None):
+    def __init__(self, other_field, other_value, other_label=None, error_message=None):
         self.other_field = other_field
         self.other_value = other_value
+        other_label = other_label or other_value
         self.error_message = error_message or lazy_inter(gettext_lazy("This field must be given if %(field)s is not %(value)s"), {
-            'field': other_field, 'value': other_value})
+            'field': other_field, 'value': other_label})
         self.always_test = True
 
     def __call__(self, field_data, all_data):
@@ -324,6 +356,38 @@
             if field_name != self.field_name and value == field_data:
                 raise ValidationError, self.error_message
 
+class NumberIsInRange(object):
+    """
+    Validator that tests if a value is in a range (inclusive).
+    """
+    def __init__(self, lower=None, upper=None, error_message=''):
+        self.lower, self.upper = lower, upper
+        if not error_message:
+            if lower and upper:
+                 self.error_message = gettext("This value must be between %(lower)s and %(upper)s.") % {'lower': lower, 'upper': upper}
+            elif lower:
+                self.error_message = gettext("This value must be at least %s.") % lower
+            elif upper:
+                self.error_message = gettext("This value must be no more than %s.") % upper
+        else:
+            self.error_message = error_message
+
+    def __call__(self, field_data, all_data):
+        # Try to make the value numeric. If this fails, we assume another 
+        # validator will catch the problem.
+        try:
+            val = float(field_data)
+        except ValueError:
+            return
+            
+        # Now validate
+        if self.lower and self.upper and (val < self.lower or val > self.upper):
+            raise ValidationError(self.error_message)
+        elif self.lower and val < self.lower:
+            raise ValidationError(self.error_message)
+        elif self.upper and val > self.upper:
+            raise ValidationError(self.error_message)
+
 class IsAPowerOf(object):
     """
     >>> v = IsAPowerOf(2)
@@ -352,10 +416,12 @@
             float(data)
         except ValueError:
             raise ValidationError, gettext("Please enter a valid decimal number.")
-        if len(data) > (self.max_digits + 1):
+        # Negative floats require more space to input.
+        max_allowed_length = data.startswith('-') and (self.max_digits + 2) or (self.max_digits + 1)
+        if len(data) > max_allowed_length:
             raise ValidationError, ngettext("Please enter a valid decimal number with at most %s total digit.",
                 "Please enter a valid decimal number with at most %s total digits.", self.max_digits) % self.max_digits
-        if (not '.' in data and len(data) > (self.max_digits - self.decimal_places)) or ('.' in data and len(data) > (self.max_digits - (self.decimal_places - len(data.split('.')[1])) + 1)):
+        if (not '.' in data and len(data) > (max_allowed_length - self.decimal_places - 1)) or ('.' in data and len(data) > (max_allowed_length - (self.decimal_places - len(data.split('.')[1])))):
             raise ValidationError, ngettext( "Please enter a valid decimal number with a whole part of at most %s digit.",
                 "Please enter a valid decimal number with a whole part of at most %s digits.", str(self.max_digits-self.decimal_places)) % str(self.max_digits-self.decimal_places)
         if '.' in data and len(data.split('.')[1]) > self.decimal_places:

Modified: cs/pythia/trunk/opal/core/xheaders.py
===================================================================
--- cs/pythia/trunk/opal/core/xheaders.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/core/xheaders.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -13,9 +13,10 @@
     """
     Adds the "X-Object-Type" and "X-Object-Id" headers to the given
     HttpResponse according to the given model and object_id -- but only if the
-    given HttpRequest object has an IP address within the INTERNAL_IPS setting.
+    given HttpRequest object has an IP address within the INTERNAL_IPS setting
+    or if the request is from a logged in staff member.
     """
     from opal.conf import settings
-    if request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS:
+    if request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS or (hasattr(request, 'user') and request.user.is_authenticated() and request.user.is_staff):
         response['X-Object-Type'] = "%s.%s" % (model._meta.app_label, model._meta.object_name.lower())
         response['X-Object-Id'] = str(object_id)

Modified: cs/pythia/trunk/opal/db/__init__.py
===================================================================
--- cs/pythia/trunk/opal/db/__init__.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/__init__.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -8,7 +8,7 @@
     settings.DATABASE_ENGINE = 'dummy'
 
 try:
-    backend = __import__('opal.db.backends.%s.base' % settings.DATABASE_ENGINE, '', '', [''])
+    backend = __import__('opal.db.backends.%s.base' % settings.DATABASE_ENGINE, {}, {}, [''])
 except ImportError, e:
     # The database backend wasn't found. Display a helpful error message
     # listing all possible database backends.
@@ -18,16 +18,16 @@
     available_backends = [f for f in os.listdir(backend_dir) if not f.startswith('_') and not f.startswith('.') and not f.endswith('.py') and not f.endswith('.pyc')]
     available_backends.sort()
     if settings.DATABASE_ENGINE not in available_backends:
-        raise ImproperlyConfigured, "%r isn't an available database backend. vailable options are: %s" % \
+        raise ImproperlyConfigured, "%r isn't an available database backend. Available options are: %s" % \
             (settings.DATABASE_ENGINE, ", ".join(map(repr, available_backends)))
     else:
         raise # If there's some other error, this must be an error in Django itself.
 
-get_introspection_module = lambda: __import__('opal.db.backends.%s.introspection' % settings.DATABASE_ENGINE, '', '', [''])
-get_creation_module = lambda: __import__('opal.db.backends.%s.creation' % settings.DATABASE_ENGINE, '', '', [''])
-runshell = lambda: __import__('opal.db.backends.%s.client' % settings.DATABASE_ENGINE, '', '', ['']).runshell()
+get_introspection_module = lambda: __import__('opal.db.backends.%s.introspection' % settings.DATABASE_ENGINE, {}, {}, [''])
+get_creation_module = lambda: __import__('opal.db.backends.%s.creation' % settings.DATABASE_ENGINE, {}, {}, [''])
+runshell = lambda: __import__('opal.db.backends.%s.client' % settings.DATABASE_ENGINE, {}, {}, ['']).runshell()
 
-connection = backend.DatabaseWrapper()
+connection = backend.DatabaseWrapper(**settings.DATABASE_OPTIONS)
 DatabaseError = backend.DatabaseError
 
 # Register an event that closes the database connection

Modified: cs/pythia/trunk/opal/db/backends/ado_mssql/base.py
===================================================================
--- cs/pythia/trunk/opal/db/backends/ado_mssql/base.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/backends/ado_mssql/base.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -55,7 +55,7 @@
     from opal.utils._threading_local import local
 
 class DatabaseWrapper(local):
-    def __init__(self):
+    def __init__(self, **kwargs):
         self.connection = None
         self.queries = []
 
@@ -76,10 +76,11 @@
         return cursor
 
     def _commit(self):
-        return self.connection.commit()
+        if self.connection is not None:
+            return self.connection.commit()
 
     def _rollback(self):
-        if self.connection:
+        if self.connection is not None:
             return self.connection.rollback()
 
     def close(self):
@@ -125,6 +126,9 @@
 def get_random_function_sql():
     return "RAND()"
 
+def get_deferrable_sql():
+    return " DEFERRABLE INITIALLY DEFERRED"
+
 def get_fulltext_search_sql(field_name):
     raise NotImplementedError
 
@@ -134,6 +138,19 @@
 def get_pk_default_value():
     return "DEFAULT"
 
+def get_sql_flush(sql_styler, full_table_list):
+    """Return a list of SQL statements required to remove all data from
+    all tables in the database (without actually removing the tables
+    themselves) and put the database in an empty 'initial' state
+    """
+    # Return a list of 'TRUNCATE x;', 'TRUNCATE y;', 'TRUNCATE z;'... style SQL statements
+    # TODO - SQL not actually tested against ADO MSSQL yet!
+    # TODO - autoincrement indices reset required? See other get_sql_flush() implementations
+    sql_list = ['%s %s;' % \
+                (sql_styler.SQL_KEYWORD('TRUNCATE'),
+                 sql_styler.SQL_FIELD(quote_name(table))
+                 )  for table in full_table_list]
+
 OPERATOR_MAPPING = {
     'exact': '= %s',
     'iexact': 'LIKE %s',

Modified: cs/pythia/trunk/opal/db/backends/ado_mssql/creation.py
===================================================================
--- cs/pythia/trunk/opal/db/backends/ado_mssql/creation.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/backends/ado_mssql/creation.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -21,6 +21,5 @@
     'SmallIntegerField': 'smallint',
     'TextField':         'text',
     'TimeField':         'time',
-    'URLField':          'varchar(200)',
     'USStateField':      'varchar(2)',
 }

Modified: cs/pythia/trunk/opal/db/backends/dummy/base.py
===================================================================
--- cs/pythia/trunk/opal/db/backends/dummy/base.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/backends/dummy/base.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -20,6 +20,9 @@
     _commit = complain
     _rollback = complain
 
+    def __init__(self, **kwargs):
+        pass
+
     def close(self):
         pass # close()
 
@@ -33,6 +36,9 @@
 get_date_trunc_sql = complain
 get_limit_offset_sql = complain
 get_random_function_sql = complain
+get_deferrable_sql = complain
 get_fulltext_search_sql = complain
 get_drop_foreignkey_sql = complain
+get_sql_flush = complain
+
 OPERATOR_MAPPING = {}

Modified: cs/pythia/trunk/opal/db/backends/mysql/base.py
===================================================================
--- cs/pythia/trunk/opal/db/backends/mysql/base.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/backends/mysql/base.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -10,46 +10,46 @@
 except ImportError, e:
     from opal.core.exceptions import ImproperlyConfigured
     raise ImproperlyConfigured, "Error loading MySQLdb module: %s" % e
+
+# We want version (1, 2, 1, 'final', 2) or later. We can't just use
+# lexicographic ordering in this check because then (1, 2, 1, 'gamma')
+# inadvertently passes the version test.
+version = Database.version_info
+if (version < (1,2,1) or (version[:3] == (1, 2, 1) and 
+        (len(version) < 5 or version[3] != 'final' or version[4] < 2))):
+    raise ImportError, "MySQLdb-1.2.1p2 or newer is required; you have %s" % Database.__version__
+
 from MySQLdb.converters import conversions
 from MySQLdb.constants import FIELD_TYPE
 import types
+import re
 
 DatabaseError = Database.DatabaseError
 
+# MySQLdb-1.2.1 supports the Python boolean type, and only uses datetime
+# module for time-related columns; older versions could have used mx.DateTime
+# or strings if there were no datetime module. However, MySQLdb still returns
+# TIME columns as timedelta -- they are more like timedelta in terms of actual
+# behavior as they are signed and include days -- and Django expects time, so
+# we still need to override that.
 opal_conversions = conversions.copy()
 opal_conversions.update({
-    types.BooleanType: util.rev_typecast_boolean,
-    FIELD_TYPE.DATETIME: util.typecast_timestamp,
-    FIELD_TYPE.DATE: util.typecast_date,
     FIELD_TYPE.TIME: util.typecast_time,
 })
 
-# This is an extra debug layer over MySQL queries, to display warnings.
-# It's only used when DEBUG=True.
-class MysqlDebugWrapper:
-    def __init__(self, cursor):
-        self.cursor = cursor
+# This should match the numerical portion of the version numbers (we can treat
+# versions like 5.0.24 and 5.0.24a as the same). Based on the list of version
+# at http://dev.mysql.com/doc/refman/4.1/en/news.html and
+# http://dev.mysql.com/doc/refman/5.0/en/news.html .
+server_version_re = re.compile(r'(\d{1,2})\.(\d{1,2})\.(\d{1,2})')
 
-    def execute(self, sql, params=()):
-        try:
-            return self.cursor.execute(sql, params)
-        except Database.Warning, w:
-            self.cursor.execute("SHOW WARNINGS")
-            raise Database.Warning, "%s: %s" % (w, self.cursor.fetchall())
+# MySQLdb-1.2.1 and newer automatically makes use of SHOW WARNINGS on
+# MySQL-4.1 and newer, so the MysqlDebugWrapper is unnecessary. Since the
+# point is to raise Warnings as exceptions, this can be done with the Python
+# warning module, and this is setup when the connection is created, and the
+# standard util.CursorDebugWrapper can be used. Also, using sql_mode
+# TRADITIONAL will automatically cause most warnings to be treated as errors.
 
-    def executemany(self, sql, param_list):
-        try:
-            return self.cursor.executemany(sql, param_list)
-        except Database.Warning, w:
-            self.cursor.execute("SHOW WARNINGS")
-            raise Database.Warning, "%s: %s" % (w, self.cursor.fetchall())
-
-    def __getattr__(self, attr):
-        if self.__dict__.has_key(attr):
-            return self.__dict__[attr]
-        else:
-            return getattr(self.cursor, attr)
-
 try:
     # Only exists in Python 2.4+
     from threading import local
@@ -58,9 +58,11 @@
     from opal.utils._threading_local import local
 
 class DatabaseWrapper(local):
-    def __init__(self):
+    def __init__(self, **kwargs):
         self.connection = None
         self.queries = []
+        self.server_version = None
+        self.options = kwargs
 
     def _valid_connection(self):
         if self.connection is not None:
@@ -74,32 +76,41 @@
 
     def cursor(self):
         from opal.conf import settings
+        from warnings import filterwarnings
         if not self._valid_connection():
             kwargs = {
-                'user': settings.DATABASE_USER,
-                'db': settings.DATABASE_NAME,
-                'passwd': settings.DATABASE_PASSWORD,
                 'conv': opal_conversions,
+                'charset': 'utf8',
+                'use_unicode': False,
             }
+            if settings.DATABASE_USER:
+                kwargs['user'] = settings.DATABASE_USER
+            if settings.DATABASE_NAME:
+                kwargs['db'] = settings.DATABASE_NAME
+            if settings.DATABASE_PASSWORD:
+                kwargs['passwd'] = settings.DATABASE_PASSWORD
             if settings.DATABASE_HOST.startswith('/'):
                 kwargs['unix_socket'] = settings.DATABASE_HOST
-            else:
+            elif settings.DATABASE_HOST:
                 kwargs['host'] = settings.DATABASE_HOST
             if settings.DATABASE_PORT:
                 kwargs['port'] = int(settings.DATABASE_PORT)
+            kwargs.update(self.options)
             self.connection = Database.connect(**kwargs)
-        cursor = self.connection.cursor()
-        if self.connection.get_server_info() >= '4.1':
-            cursor.execute("SET NAMES 'utf8'")
+            cursor = self.connection.cursor()
+        else:
+            cursor = self.connection.cursor()
         if settings.DEBUG:
-            return util.CursorDebugWrapper(MysqlDebugWrapper(cursor), self)
+            filterwarnings("error", category=Database.Warning)
+            return util.CursorDebugWrapper(cursor, self)
         return cursor
 
     def _commit(self):
-        self.connection.commit()
+        if self.connection is not None:
+            self.connection.commit()
 
     def _rollback(self):
-        if self.connection:
+        if self.connection is not None:
             try:
                 self.connection.rollback()
             except Database.NotSupportedError:
@@ -110,6 +121,16 @@
             self.connection.close()
             self.connection = None
 
+    def get_server_version(self):
+        if not self.server_version:
+            if not self._valid_connection():
+                self.cursor()
+            m = server_version_re.match(self.connection.get_server_info())
+            if not m:
+                raise Exception('Unable to determine MySQL version from version string %r' % self.connection.get_server_info())
+            self.server_version = tuple([int(x) for x in m.groups()])
+        return self.server_version
+
 supports_constraints = True
 
 def quote_name(name):
@@ -152,6 +173,9 @@
 def get_random_function_sql():
     return "RAND()"
 
+def get_deferrable_sql():
+    return ""
+
 def get_fulltext_search_sql(field_name):
     return 'MATCH (%s) AGAINST (%%s IN BOOLEAN MODE)' % field_name
 
@@ -161,6 +185,36 @@
 def get_pk_default_value():
     return "DEFAULT"
 
+def get_sql_flush(style, tables, sequences):
+    """Return a list of SQL statements required to remove all data from
+    all tables in the database (without actually removing the tables
+    themselves) and put the database in an empty 'initial' state
+    
+    """
+    # NB: The generated SQL below is specific to MySQL
+    # 'TRUNCATE x;', 'TRUNCATE y;', 'TRUNCATE z;'... style SQL statements
+    # to clear all tables of all data
+    if tables:
+        sql = ['SET FOREIGN_KEY_CHECKS = 0;'] + \
+              ['%s %s;' % \
+                (style.SQL_KEYWORD('TRUNCATE'),
+                 style.SQL_FIELD(quote_name(table))
+                )  for table in tables] + \
+              ['SET FOREIGN_KEY_CHECKS = 1;']
+              
+        # 'ALTER TABLE table AUTO_INCREMENT = 1;'... style SQL statements
+        # to reset sequence indices
+        sql.extend(["%s %s %s %s %s;" % \
+            (style.SQL_KEYWORD('ALTER'),
+             style.SQL_KEYWORD('TABLE'),
+             style.SQL_TABLE(quote_name(sequence['table'])),
+             style.SQL_KEYWORD('AUTO_INCREMENT'),
+             style.SQL_FIELD('= 1'),
+            ) for sequence in sequences])
+        return sql
+    else:
+        return []
+
 OPERATOR_MAPPING = {
     'exact': '= %s',
     'iexact': 'LIKE %s',

Modified: cs/pythia/trunk/opal/db/backends/mysql/client.py
===================================================================
--- cs/pythia/trunk/opal/db/backends/mysql/client.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/backends/mysql/client.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -3,12 +3,25 @@
 
 def runshell():
     args = ['']
-    args += ["--user=%s" % settings.DATABASE_USER]
-    if settings.DATABASE_PASSWORD:
-        args += ["--password=%s" % settings.DATABASE_PASSWORD]
-    if settings.DATABASE_HOST:
-        args += ["--host=%s" % settings.DATABASE_HOST]
-    if settings.DATABASE_PORT:
-        args += ["--port=%s" % settings.DATABASE_PORT]
-    args += [settings.DATABASE_NAME]
+    db = settings.DATABASE_OPTIONS.get('db', settings.DATABASE_NAME)
+    user = settings.DATABASE_OPTIONS.get('user', settings.DATABASE_USER)
+    passwd = settings.DATABASE_OPTIONS.get('passwd', settings.DATABASE_PASSWORD)
+    host = settings.DATABASE_OPTIONS.get('host', settings.DATABASE_HOST)
+    port = settings.DATABASE_OPTIONS.get('port', settings.DATABASE_PORT)
+    defaults_file = settings.DATABASE_OPTIONS.get('read_default_file')
+    # Seems to be no good way to set sql_mode with CLI
+    
+    if defaults_file:
+        args += ["--defaults-file=%s" % defaults_file]
+    if user:
+        args += ["--user=%s" % user]
+    if passwd:
+        args += ["--password=%s" % passwd]
+    if host:
+        args += ["--host=%s" % host]
+    if port:
+        args += ["--port=%s" % port]
+    if db:
+        args += [db]
+
     os.execvp('mysql', args)

Modified: cs/pythia/trunk/opal/db/backends/mysql/creation.py
===================================================================
--- cs/pythia/trunk/opal/db/backends/mysql/creation.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/backends/mysql/creation.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -25,6 +25,5 @@
     'SmallIntegerField': 'smallint',
     'TextField':         'longtext',
     'TimeField':         'time',
-    'URLField':          'varchar(200)',
     'USStateField':      'varchar(2)',
 }

Modified: cs/pythia/trunk/opal/db/backends/mysql/introspection.py
===================================================================
--- cs/pythia/trunk/opal/db/backends/mysql/introspection.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/backends/mysql/introspection.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -36,13 +36,14 @@
             SELECT column_name, referenced_table_name, referenced_column_name
             FROM information_schema.key_column_usage
             WHERE table_name = %s
+                AND table_schema = DATABASE()
                 AND referenced_table_name IS NOT NULL
                 AND referenced_column_name IS NOT NULL""", [table_name])
         constraints.extend(cursor.fetchall())
     except (ProgrammingError, OperationalError):
         # Fall back to "SHOW CREATE TABLE", for previous MySQL versions.
         # Go through all constraints and save the equal matches.
-        cursor.execute("SHOW CREATE TABLE %s" % table_name)
+        cursor.execute("SHOW CREATE TABLE %s" % quote_name(table_name))
         for row in cursor.fetchall():
             pos = 0
             while True:

Copied: cs/pythia/trunk/opal/db/backends/mysql_old (from rev 7719, vendor/django/current/django/db/backends/mysql_old)

Modified: cs/pythia/trunk/opal/db/backends/oracle/base.py
===================================================================
--- cs/pythia/trunk/opal/db/backends/oracle/base.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/backends/oracle/base.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -21,9 +21,10 @@
     from opal.utils._threading_local import local
 
 class DatabaseWrapper(local):
-    def __init__(self):
+    def __init__(self, **kwargs):
         self.connection = None
         self.queries = []
+        self.options = kwargs
 
     def _valid_connection(self):
         return self.connection is not None
@@ -35,17 +36,18 @@
                 settings.DATABASE_HOST = 'localhost'
             if len(settings.DATABASE_PORT.strip()) != 0:
                 dsn = Database.makedsn(settings.DATABASE_HOST, int(settings.DATABASE_PORT), settings.DATABASE_NAME)
-                self.connection = Database.connect(settings.DATABASE_USER, settings.DATABASE_PASSWORD, dsn)
+                self.connection = Database.connect(settings.DATABASE_USER, settings.DATABASE_PASSWORD, dsn, **self.options)
             else:
                 conn_string = "%s/%s@%s" % (settings.DATABASE_USER, settings.DATABASE_PASSWORD, settings.DATABASE_NAME)
-                self.connection = Database.connect(conn_string)
+                self.connection = Database.connect(conn_string, **self.options)
         return FormatStylePlaceholderCursor(self.connection)
 
     def _commit(self):
-        self.connection.commit()
+        if self.connection is not None:
+            self.connection.commit()
 
     def _rollback(self):
-        if self.connection:
+        if self.connection is not None:
             try:
                 self.connection.rollback()
             except Database.NotSupportedError:
@@ -107,6 +109,9 @@
 def get_random_function_sql():
     return "DBMS_RANDOM.RANDOM"
 
+def get_deferrable_sql():
+    return " DEFERRABLE INITIALLY DEFERRED"
+
 def get_fulltext_search_sql(field_name):
     raise NotImplementedError
 
@@ -116,6 +121,20 @@
 def get_pk_default_value():
     return "DEFAULT"
 
+def get_sql_flush(style, tables, sequences):
+    """Return a list of SQL statements required to remove all data from
+    all tables in the database (without actually removing the tables
+    themselves) and put the database in an empty 'initial' state
+    """
+    # Return a list of 'TRUNCATE x;', 'TRUNCATE y;', 'TRUNCATE z;'... style SQL statements
+    # TODO - SQL not actually tested against Oracle yet!
+    # TODO - autoincrement indices reset required? See other get_sql_flush() implementations
+    sql = ['%s %s;' % \
+            (style.SQL_KEYWORD('TRUNCATE'),
+             style.SQL_FIELD(quote_name(table))
+             )  for table in tables]
+    return sql
+
 OPERATOR_MAPPING = {
     'exact': '= %s',
     'iexact': 'LIKE %s',

Modified: cs/pythia/trunk/opal/db/backends/oracle/creation.py
===================================================================
--- cs/pythia/trunk/opal/db/backends/oracle/creation.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/backends/oracle/creation.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -21,6 +21,5 @@
     'SmallIntegerField': 'smallint',
     'TextField':         'long',
     'TimeField':         'timestamp',
-    'URLField':          'varchar(200)',
     'USStateField':      'varchar(2)',
 }

Modified: cs/pythia/trunk/opal/db/backends/postgresql/base.py
===================================================================
--- cs/pythia/trunk/opal/db/backends/postgresql/base.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/backends/postgresql/base.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -20,14 +20,51 @@
     # Import copy of _thread_local.py from Python 2.4
     from opal.utils._threading_local import local
 
+def smart_basestring(s, charset):
+    if isinstance(s, unicode):
+        return s.encode(charset)
+    return s
+
+class UnicodeCursorWrapper(object):
+    """
+    A thin wrapper around psycopg cursors that allows them to accept Unicode
+    strings as params.
+
+    This is necessary because psycopg doesn't apply any DB quoting to
+    parameters that are Unicode strings. If a param is Unicode, this will
+    convert it to a bytestring using DEFAULT_CHARSET before passing it to
+    psycopg.
+    """
+    def __init__(self, cursor, charset):
+        self.cursor = cursor
+        self.charset = charset
+
+    def execute(self, sql, params=()):
+        return self.cursor.execute(sql, [smart_basestring(p, self.charset) for p in params])
+
+    def executemany(self, sql, param_list):
+        new_param_list = [tuple([smart_basestring(p, self.charset) for p in params]) for params in param_list]
+        return self.cursor.executemany(sql, new_param_list)
+
+    def __getattr__(self, attr):
+        if self.__dict__.has_key(attr):
+            return self.__dict__[attr]
+        else:
+            return getattr(self.cursor, attr)
+
+postgres_version = None
+
 class DatabaseWrapper(local):
-    def __init__(self):
+    def __init__(self, **kwargs):
         self.connection = None
         self.queries = []
+        self.options = kwargs
 
     def cursor(self):
         from opal.conf import settings
+        set_tz = False
         if self.connection is None:
+            set_tz = True
             if settings.DATABASE_NAME == '':
                 from opal.core.exceptions import ImproperlyConfigured
                 raise ImproperlyConfigured, "You need to specify DATABASE_NAME in your Django settings file."
@@ -40,19 +77,26 @@
                 conn_string += " host=%s" % settings.DATABASE_HOST
             if settings.DATABASE_PORT:
                 conn_string += " port=%s" % settings.DATABASE_PORT
-            self.connection = Database.connect(conn_string)
+            self.connection = Database.connect(conn_string, **self.options)
             self.connection.set_isolation_level(1) # make transactions transparent to all cursors
         cursor = self.connection.cursor()
-        cursor.execute("SET TIME ZONE %s", [settings.TIME_ZONE])
+        if set_tz:
+            cursor.execute("SET TIME ZONE %s", [settings.TIME_ZONE])
+        cursor = UnicodeCursorWrapper(cursor, settings.DEFAULT_CHARSET)
+        global postgres_version
+        if not postgres_version:
+            cursor.execute("SELECT version()")
+            postgres_version = [int(val) for val in cursor.fetchone()[0].split()[1].split('.')]        
         if settings.DEBUG:
             return util.CursorDebugWrapper(cursor, self)
         return cursor
 
     def _commit(self):
-        return self.connection.commit()
+        if self.connection is not None:
+            return self.connection.commit()
 
     def _rollback(self):
-        if self.connection:
+        if self.connection is not None:
             return self.connection.rollback()
 
     def close(self):
@@ -102,6 +146,9 @@
 def get_random_function_sql():
     return "RANDOM()"
 
+def get_deferrable_sql():
+    return " DEFERRABLE INITIALLY DEFERRED"
+    
 def get_fulltext_search_sql(field_name):
     raise NotImplementedError
 
@@ -111,13 +158,69 @@
 def get_pk_default_value():
     return "DEFAULT"
 
+def get_sql_flush(style, tables, sequences):
+    """Return a list of SQL statements required to remove all data from
+    all tables in the database (without actually removing the tables
+    themselves) and put the database in an empty 'initial' state
+    
+    """    
+    if tables:
+        if postgres_version >= [8, 1]:
+            # Postgres 8.1+ can do 'TRUNCATE x, y, z...;'. In fact, it *has to* in order to be able to
+            # truncate tables referenced by a foreign key in any other table. The result is a
+            # single SQL TRUNCATE statement.
+            sql = ['%s %s;' % \
+                (style.SQL_KEYWORD('TRUNCATE'),
+                 style.SQL_FIELD(', '.join([quote_name(table) for table in tables]))
+            )]
+        else:
+            # Older versions of Postgres can't do TRUNCATE in a single call, so they must use 
+            # a simple delete.
+            sql = ['%s %s %s;' % \
+                    (style.SQL_KEYWORD('DELETE'),
+                     style.SQL_KEYWORD('FROM'),
+                     style.SQL_FIELD(quote_name(table))
+                     ) for table in tables]
+
+        # 'ALTER SEQUENCE sequence_name RESTART WITH 1;'... style SQL statements
+        # to reset sequence indices
+        for sequence_info in sequences:
+            table_name = sequence_info['table']
+            column_name = sequence_info['column']
+            if column_name and len(column_name)>0:
+                # sequence name in this case will be <table>_<column>_seq
+                sql.append("%s %s %s %s %s %s;" % \
+                    (style.SQL_KEYWORD('ALTER'),
+                    style.SQL_KEYWORD('SEQUENCE'),
+                    style.SQL_FIELD('%s_%s_seq' % (table_name, column_name)),
+                    style.SQL_KEYWORD('RESTART'),
+                    style.SQL_KEYWORD('WITH'),
+                    style.SQL_FIELD('1')
+                    )
+                )
+            else:
+                # sequence name in this case will be <table>_id_seq
+                sql.append("%s %s %s %s %s %s;" % \
+                    (style.SQL_KEYWORD('ALTER'),
+                     style.SQL_KEYWORD('SEQUENCE'),
+                     style.SQL_FIELD('%s_id_seq' % table_name),
+                     style.SQL_KEYWORD('RESTART'),
+                     style.SQL_KEYWORD('WITH'),
+                     style.SQL_FIELD('1')
+                     )
+                )
+        return sql
+    else:
+        return []
+
+        
 # Register these custom typecasts, because Django expects dates/times to be
 # in Python's native (standard-library) datetime/time format, whereas psycopg
 # use mx.DateTime by default.
 try:
     Database.register_type(Database.new_type((1082,), "DATE", util.typecast_date))
 except AttributeError:
-    raise Exception, "You appear to be using psycopg version 2, which isn't supported yet, because it's still in beta. Use psycopg version 1 instead: http://initd.org/projects/psycopg1"
+    raise Exception, "You appear to be using psycopg version 2. Set your DATABASE_ENGINE to 'postgresql_psycopg2' instead of 'postgresql'."
 Database.register_type(Database.new_type((1083,1266), "TIME", util.typecast_time))
 Database.register_type(Database.new_type((1114,1184), "TIMESTAMP", util.typecast_timestamp))
 Database.register_type(Database.new_type((16,), "BOOLEAN", util.typecast_boolean))

Modified: cs/pythia/trunk/opal/db/backends/postgresql/creation.py
===================================================================
--- cs/pythia/trunk/opal/db/backends/postgresql/creation.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/backends/postgresql/creation.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -25,6 +25,5 @@
     'SmallIntegerField': 'smallint',
     'TextField':         'text',
     'TimeField':         'time',
-    'URLField':          'varchar(200)',
     'USStateField':      'varchar(2)',
 }

Modified: cs/pythia/trunk/opal/db/backends/postgresql_psycopg2/base.py
===================================================================
--- cs/pythia/trunk/opal/db/backends/postgresql_psycopg2/base.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/backends/postgresql_psycopg2/base.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -20,14 +20,19 @@
     # Import copy of _thread_local.py from Python 2.4
     from opal.utils._threading_local import local
 
+postgres_version = None
+
 class DatabaseWrapper(local):
-    def __init__(self):
+    def __init__(self, **kwargs):
         self.connection = None
         self.queries = []
+        self.options = kwargs
 
     def cursor(self):
         from opal.conf import settings
+        set_tz = False
         if self.connection is None:
+            set_tz = True
             if settings.DATABASE_NAME == '':
                 from opal.core.exceptions import ImproperlyConfigured
                 raise ImproperlyConfigured, "You need to specify DATABASE_NAME in your Django settings file."
@@ -40,19 +45,26 @@
                 conn_string += " host=%s" % settings.DATABASE_HOST
             if settings.DATABASE_PORT:
                 conn_string += " port=%s" % settings.DATABASE_PORT
-            self.connection = Database.connect(conn_string)
+            self.connection = Database.connect(conn_string, **self.options)
             self.connection.set_isolation_level(1) # make transactions transparent to all cursors
         cursor = self.connection.cursor()
-        cursor.execute("SET TIME ZONE %s", [settings.TIME_ZONE])
+        cursor.tzinfo_factory = None
+        if set_tz:
+            cursor.execute("SET TIME ZONE %s", [settings.TIME_ZONE])
+        global postgres_version
+        if not postgres_version:
+            cursor.execute("SELECT version()")
+            postgres_version = [int(val) for val in cursor.fetchone()[0].split()[1].split('.')]        
         if settings.DEBUG:
             return util.CursorDebugWrapper(cursor, self)
         return cursor
 
     def _commit(self):
-        return self.connection.commit()
+        if self.connection is not None:
+            return self.connection.commit()
 
     def _rollback(self):
-        if self.connection:
+        if self.connection is not None:
             return self.connection.rollback()
 
     def close(self):
@@ -67,24 +79,10 @@
         return name # Quoting once is enough.
     return '"%s"' % name
 
-def dictfetchone(cursor):
-    "Returns a row from the cursor as a dict"
-    # TODO: cursor.dictfetchone() doesn't exist in psycopg2,
-    # but no Django code uses this. Safe to remove?
-    return cursor.dictfetchone()
+dictfetchone = util.dictfetchone
+dictfetchmany = util.dictfetchmany
+dictfetchall = util.dictfetchall
 
-def dictfetchmany(cursor, number):
-    "Returns a certain number of rows from a cursor as a dict"
-    # TODO: cursor.dictfetchmany() doesn't exist in psycopg2,
-    # but no Django code uses this. Safe to remove?
-    return cursor.dictfetchmany(number)
-
-def dictfetchall(cursor):
-    "Returns all rows from a cursor as a dict"
-    # TODO: cursor.dictfetchall() doesn't exist in psycopg2,
-    # but no Django code uses this. Safe to remove?
-    return cursor.dictfetchall()
-
 def get_last_insert_id(cursor, table_name, pk_name):
     cursor.execute("SELECT CURRVAL('\"%s_%s_seq\"')" % (table_name, pk_name))
     return cursor.fetchone()[0]
@@ -108,6 +106,9 @@
 def get_random_function_sql():
     return "RANDOM()"
 
+def get_deferrable_sql():
+    return " DEFERRABLE INITIALLY DEFERRED"
+
 def get_fulltext_search_sql(field_name):
     raise NotImplementedError
 
@@ -117,6 +118,58 @@
 def get_pk_default_value():
     return "DEFAULT"
 
+def get_sql_flush(style, tables, sequences):
+    """Return a list of SQL statements required to remove all data from
+    all tables in the database (without actually removing the tables
+    themselves) and put the database in an empty 'initial' state
+    """
+    if tables:
+        if postgres_version >= [8, 1]:
+            # Postgres 8.1+ can do 'TRUNCATE x, y, z...;'. In fact, it *has to* in order to be able to
+            # truncate tables referenced by a foreign key in any other table. The result is a
+            # single SQL TRUNCATE statement
+            sql = ['%s %s;' % \
+                    (style.SQL_KEYWORD('TRUNCATE'),
+                     style.SQL_FIELD(', '.join([quote_name(table) for table in tables]))
+                    )]
+        else:
+            sql = ['%s %s %s;' % \
+                    (style.SQL_KEYWORD('DELETE'),
+                     style.SQL_KEYWORD('FROM'),
+                     style.SQL_FIELD(quote_name(table))
+                     ) for table in tables]
+                     
+        # 'ALTER SEQUENCE sequence_name RESTART WITH 1;'... style SQL statements
+        # to reset sequence indices
+        for sequence in sequences:
+            table_name = sequence['table']
+            column_name = sequence['column']
+            if column_name and len(column_name) > 0:
+                # sequence name in this case will be <table>_<column>_seq
+                sql.append("%s %s %s %s %s %s;" % \
+                    (style.SQL_KEYWORD('ALTER'),
+                     style.SQL_KEYWORD('SEQUENCE'),
+                     style.SQL_FIELD('%s_%s_seq' % (table_name, column_name)),
+                     style.SQL_KEYWORD('RESTART'),
+                     style.SQL_KEYWORD('WITH'),
+                     style.SQL_FIELD('1')
+                     )
+                )
+            else:
+                # sequence name in this case will be <table>_id_seq
+                sql.append("%s %s %s %s %s %s;" % \
+                    (style.SQL_KEYWORD('ALTER'),
+                     style.SQL_KEYWORD('SEQUENCE'),
+                     style.SQL_FIELD('%s_id_seq' % table_name),
+                     style.SQL_KEYWORD('RESTART'),
+                     style.SQL_KEYWORD('WITH'),
+                     style.SQL_FIELD('1')
+                     )
+                )
+        return sql
+    else:
+        return []
+        
 OPERATOR_MAPPING = {
     'exact': '= %s',
     'iexact': 'ILIKE %s',

Modified: cs/pythia/trunk/opal/db/backends/sqlite3/base.py
===================================================================
--- cs/pythia/trunk/opal/db/backends/sqlite3/base.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/backends/sqlite3/base.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -4,10 +4,18 @@
 
 from opal.db.backends import util
 try:
-    from pysqlite2 import dbapi2 as Database
+    try:
+        from sqlite3 import dbapi2 as Database
+    except ImportError:
+        from pysqlite2 import dbapi2 as Database
 except ImportError, e:
+    import sys
     from opal.core.exceptions import ImproperlyConfigured
-    raise ImproperlyConfigured, "Error loading pysqlite2 module: %s" % e
+    if sys.version_info < (2, 5, 0):
+        module = 'pysqlite2'
+    else:
+        module = 'sqlite3'
+    raise ImproperlyConfigured, "Error loading %s module: %s" % (module, e)
 
 DatabaseError = Database.DatabaseError
 
@@ -34,16 +42,20 @@
     from opal.utils._threading_local import local
 
 class DatabaseWrapper(local):
-    def __init__(self):
+    def __init__(self, **kwargs):
         self.connection = None
         self.queries = []
+        self.options = kwargs
 
     def cursor(self):
         from opal.conf import settings
         if self.connection is None:
-            self.connection = Database.connect(settings.DATABASE_NAME,
-                detect_types=Database.PARSE_DECLTYPES | Database.PARSE_COLNAMES)
-
+            kwargs = {
+                'database': settings.DATABASE_NAME,
+                'detect_types': Database.PARSE_DECLTYPES | Database.PARSE_COLNAMES,
+            }
+            kwargs.update(self.options)
+            self.connection = Database.connect(**kwargs)
             # Register extract and date_trunc functions.
             self.connection.create_function("opal_extract", 2, _sqlite_extract)
             self.connection.create_function("opal_date_trunc", 2, _sqlite_date_trunc)
@@ -55,14 +67,18 @@
             return cursor
 
     def _commit(self):
-        self.connection.commit()
+        if self.connection is not None:
+            self.connection.commit()
 
     def _rollback(self):
-        if self.connection:
+        if self.connection is not None:
             self.connection.rollback()
 
     def close(self):
-        if self.connection is not None:
+        from opal.conf import settings
+        # If database is in memory, closing the connection destroys the database.
+        # To prevent accidental data loss, ignore close requests on an in-memory db.
+        if self.connection is not None and settings.DATABASE_NAME != ":memory:":
             self.connection.close()
             self.connection = None
 
@@ -124,6 +140,9 @@
 def get_random_function_sql():
     return "RANDOM()"
 
+def get_deferrable_sql():
+    return ""
+
 def get_fulltext_search_sql(field_name):
     raise NotImplementedError
 
@@ -133,6 +152,24 @@
 def get_pk_default_value():
     return "NULL"
 
+def get_sql_flush(style, tables, sequences):
+    """Return a list of SQL statements required to remove all data from
+    all tables in the database (without actually removing the tables
+    themselves) and put the database in an empty 'initial' state
+    
+    """
+    # NB: The generated SQL below is specific to SQLite
+    # Note: The DELETE FROM... SQL generated below works for SQLite databases
+    # because constraints don't exist
+    sql = ['%s %s %s;' % \
+            (style.SQL_KEYWORD('DELETE'),
+             style.SQL_KEYWORD('FROM'),
+             style.SQL_FIELD(quote_name(table))
+             ) for table in tables]
+    # Note: No requirement for reset of auto-incremented indices (cf. other
+    # get_sql_flush() implementations). Just return SQL at this point
+    return sql
+
 def _sqlite_date_trunc(lookup_type, dt):
     try:
         dt = util.typecast_timestamp(dt)

Modified: cs/pythia/trunk/opal/db/backends/sqlite3/creation.py
===================================================================
--- cs/pythia/trunk/opal/db/backends/sqlite3/creation.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/backends/sqlite3/creation.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -24,6 +24,5 @@
     'SmallIntegerField':            'smallint',
     'TextField':                    'text',
     'TimeField':                    'time',
-    'URLField':                     'varchar(200)',
     'USStateField':                 'varchar(2)',
 }

Modified: cs/pythia/trunk/opal/db/backends/util.py
===================================================================
--- cs/pythia/trunk/opal/db/backends/util.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/backends/util.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -17,7 +17,7 @@
             if not isinstance(params, (tuple, dict)):
                 params = tuple(params)
             self.db.queries.append({
-                'sql': sql % tuple(params),
+                'sql': sql % params,
                 'time': "%.3f" % (stop - start),
             })
 
@@ -98,7 +98,7 @@
 
 def _dict_helper(desc, row):
     "Returns a dictionary for the given cursor.description and result row."
-    return dict([(desc[col[0]][0], col[1]) for col in enumerate(row)])
+    return dict(zip([col[0] for col in desc], row))
 
 def dictfetchone(cursor):
     "Returns a row from the cursor as a dict"
@@ -110,9 +110,11 @@
 def dictfetchmany(cursor, number):
     "Returns a certain number of rows from a cursor as a dict"
     desc = cursor.description
-    return [_dict_helper(desc, row) for row in cursor.fetchmany(number)]
+    for row in cursor.fetchmany(number):
+        yield _dict_helper(desc, row)
 
 def dictfetchall(cursor):
     "Returns all rows from a cursor as a dict"
     desc = cursor.description
-    return [_dict_helper(desc, row) for row in cursor.fetchall()]
+    for row in cursor.fetchall():
+        yield _dict_helper(desc, row)

Modified: cs/pythia/trunk/opal/db/models/__init__.py
===================================================================
--- cs/pythia/trunk/opal/db/models/__init__.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/models/__init__.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -25,7 +25,7 @@
     def inner(*args, **kwargs):
         bits = func(*args, **kwargs)
         viewname = bits[0]
-        return reverse(bits[0], None, *bits[1:2])
+        return reverse(bits[0], None, *bits[1:3])
     return inner
 
 class LazyDate(object):
@@ -47,7 +47,12 @@
         return "<LazyDate: %s>" % self.delta
 
     def __get_value__(self):
-        return datetime.datetime.now() + self.delta
+        return (datetime.datetime.now() + self.delta).date()
 
     def __getattr__(self, attr):
+        if attr == 'delta':
+            # To fix ticket #3377. Note that normal accesses to LazyDate.delta
+            # (after construction) will still work, because they don't go
+        # through __getattr__. This is mainly needed for unpickling.
+            raise AttributeError
         return getattr(self.__get_value__(), attr)

Modified: cs/pythia/trunk/opal/db/models/base.py
===================================================================
--- cs/pythia/trunk/opal/db/models/base.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/models/base.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -13,6 +13,7 @@
 from opal.utils.datastructures import SortedDict
 from opal.utils.functional import curry
 from opal.conf import settings
+from itertools import izip
 import types
 import sys
 import os
@@ -21,8 +22,8 @@
     "Metaclass for all models"
     def __new__(cls, name, bases, attrs):
         # If this isn't a subclass of Model, don't do anything special.
-        if not bases or bases == (object,):
-            return type.__new__(cls, name, bases, attrs)
+        if name == 'Model' or not filter(lambda b: issubclass(b, Model), bases):
+            return super(ModelBase, cls).__new__(cls, name, bases, attrs)
 
         # Create the class.
         new_class = type.__new__(cls, name, bases, {'__module__': attrs.pop('__module__')})
@@ -44,7 +45,7 @@
             new_class._meta.app_label = model_module.__name__.split('.')[-2]
 
         # Bail out early if we have already created this class.
-        m = get_model(new_class._meta.app_label, name)
+        m = get_model(new_class._meta.app_label, name, False)
         if m is not None:
             return m
 
@@ -68,7 +69,7 @@
         # the first class for this model to register with the framework. There
         # should only be one class for each model, so we must always return the
         # registered version.
-        return get_model(new_class._meta.app_label, name)
+        return get_model(new_class._meta.app_label, name, False)
 
 class Model(object):
     __metaclass__ = ModelBase
@@ -90,41 +91,74 @@
 
     def __init__(self, *args, **kwargs):
         dispatcher.send(signal=signals.pre_init, sender=self.__class__, args=args, kwargs=kwargs)
-        for f in self._meta.fields:
-            if isinstance(f.rel, ManyToOneRel):
-                try:
-                    # Assume object instance was passed in.
-                    rel_obj = kwargs.pop(f.name)
-                except KeyError:
+        
+        # There is a rather weird disparity here; if kwargs, it's set, then args
+        # overrides it. It should be one or the other; don't duplicate the work 
+        # The reason for the kwargs check is that standard iterator passes in by
+        # args, and instantiation for iteration is 33% faster.
+        args_len = len(args)
+        if args_len > len(self._meta.fields):
+            # Daft, but matches old exception sans the err msg.
+            raise IndexError("Number of args exceeds number of fields")
+
+        fields_iter = iter(self._meta.fields)
+        if not kwargs:
+            # The ordering of the izip calls matters - izip throws StopIteration
+            # when an iter throws it. So if the first iter throws it, the second
+            # is *not* consumed. We rely on this, so don't change the order
+            # without changing the logic.
+            for val, field in izip(args, fields_iter):
+                setattr(self, field.attname, val)
+        else:
+            # Slower, kwargs-ready version.
+            for val, field in izip(args, fields_iter):
+                setattr(self, field.attname, val)
+                kwargs.pop(field.name, None)
+                # Maintain compatibility with existing calls.
+                if isinstance(field.rel, ManyToOneRel):
+                    kwargs.pop(field.attname, None)
+        
+        # Now we're left with the unprocessed fields that *must* come from
+        # keywords, or default.
+        
+        for field in fields_iter:
+            if kwargs:
+                if isinstance(field.rel, ManyToOneRel):
                     try:
-                        # Object instance wasn't passed in -- must be an ID.
-                        val = kwargs.pop(f.attname)
+                        # Assume object instance was passed in.
+                        rel_obj = kwargs.pop(field.name)
                     except KeyError:
-                        val = f.get_default()
+                        try:
+                            # Object instance wasn't passed in -- must be an ID.
+                            val = kwargs.pop(field.attname)
+                        except KeyError:
+                            val = field.get_default()
+                    else:
+                        # Object instance was passed in. Special case: You can
+                        # pass in "None" for related objects if it's allowed.
+                        if rel_obj is None and field.null:
+                            val = None
+                        else:
+                            try:
+                                val = getattr(rel_obj, field.rel.get_related_field().attname)
+                            except AttributeError:
+                                raise TypeError("Invalid value: %r should be a %s instance, not a %s" % 
+                                    (field.name, field.rel.to, type(rel_obj)))
                 else:
-                    # Object instance was passed in.
-                    # Special case: You can pass in "None" for related objects if it's allowed.
-                    if rel_obj is None and f.null:
-                        val = None
-                    else:
-                        try:
-                            val = getattr(rel_obj, f.rel.get_related_field().attname)
-                        except AttributeError:
-                            raise TypeError, "Invalid value: %r should be a %s instance, not a %s" % (f.name, f.rel.to, type(rel_obj))
-                setattr(self, f.attname, val)
+                    val = kwargs.pop(field.attname, field.get_default())
             else:
-                val = kwargs.pop(f.attname, f.get_default())
-                setattr(self, f.attname, val)
-        for prop in kwargs.keys():
-            try:
-                if isinstance(getattr(self.__class__, prop), property):
-                    setattr(self, prop, kwargs.pop(prop))
-            except AttributeError:
-                pass
+                val = field.get_default()
+            setattr(self, field.attname, val)
+
         if kwargs:
-            raise TypeError, "'%s' is an invalid keyword argument for this function" % kwargs.keys()[0]
-        for i, arg in enumerate(args):
-            setattr(self, self._meta.fields[i].attname, arg)
+            for prop in kwargs.keys():
+                try:
+                    if isinstance(getattr(self.__class__, prop), property):
+                        setattr(self, prop, kwargs.pop(prop))
+                except AttributeError:
+                    pass
+            if kwargs:
+                raise TypeError, "'%s' is an invalid keyword argument for this function" % kwargs.keys()[0]
         dispatcher.send(signal=signals.post_init, sender=self.__class__, instance=self)
 
     def add_to_class(cls, name, value):
@@ -176,11 +210,12 @@
             # If it does already exist, do an UPDATE.
             if cursor.fetchone():
                 db_values = [f.get_db_prep_save(f.pre_save(self, False)) for f in non_pks]
-                cursor.execute("UPDATE %s SET %s WHERE %s=%%s" % \
-                    (backend.quote_name(self._meta.db_table),
-                    ','.join(['%s=%%s' % backend.quote_name(f.column) for f in non_pks]),
-                    backend.quote_name(self._meta.pk.column)),
-                    db_values + [pk_val])
+                if db_values:
+                    cursor.execute("UPDATE %s SET %s WHERE %s=%%s" % \
+                        (backend.quote_name(self._meta.db_table),
+                        ','.join(['%s=%%s' % backend.quote_name(f.column) for f in non_pks]),
+                        backend.quote_name(self._meta.pk.column)),
+                        db_values + [pk_val])
             else:
                 record_exists = False
         if not pk_set or not record_exists:
@@ -321,7 +356,7 @@
     def _get_FIELD_size(self, field):
         return os.path.getsize(self._get_FIELD_filename(field))
 
-    def _save_FIELD_file(self, field, filename, raw_contents):
+    def _save_FIELD_file(self, field, filename, raw_contents, save=True):
         directory = field.get_directory_name()
         try: # Create the date-based directory if it doesn't exist.
             os.makedirs(os.path.join(settings.MEDIA_ROOT, directory))
@@ -356,8 +391,9 @@
             if field.height_field:
                 setattr(self, field.height_field, height)
 
-        # Save the object, because it has changed.
-        self.save()
+        # Save the object because it has changed unless save is False
+        if save:
+            self.save()
 
     _save_FIELD_file.alters_data = True
 
@@ -375,24 +411,6 @@
             setattr(self, cachename, get_image_dimensions(filename))
         return getattr(self, cachename)
 
-    # Handles setting many-to-many related objects.
-    # Example: Album.set_songs()
-    def _set_related_many_to_many(self, rel_class, rel_field, id_list):
-        id_list = map(int, id_list) # normalize to integers
-        rel = rel_field.rel.to
-        m2m_table = rel_field.m2m_db_table()
-        this_id = self._get_pk_val()
-        cursor = connection.cursor()
-        cursor.execute("DELETE FROM %s WHERE %s = %%s" % \
-            (backend.quote_name(m2m_table),
-            backend.quote_name(rel_field.m2m_column_name())), [this_id])
-        sql = "INSERT INTO %s (%s, %s) VALUES (%%s, %%s)" % \
-            (backend.quote_name(m2m_table),
-            backend.quote_name(rel_field.m2m_column_name()),
-            backend.quote_name(rel_field.m2m_reverse_name()))
-        cursor.executemany(sql, [(this_id, i) for i in id_list])
-        transaction.commit_unless_managed()
-
 ############################################
 # HELPER FUNCTIONS (CURRIED MODEL METHODS) #
 ############################################

Modified: cs/pythia/trunk/opal/db/models/fields/__init__.py
===================================================================
--- cs/pythia/trunk/opal/db/models/fields/__init__.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/models/fields/__init__.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -5,6 +5,7 @@
 from opal import forms
 from opal.core.exceptions import ObjectDoesNotExist
 from opal.utils.functional import curry
+from opal.utils.itercompat import tee
 from opal.utils.text import capfirst
 from opal.utils.translation import gettext, gettext_lazy
 import datetime, os, time
@@ -20,7 +21,7 @@
 BLANK_CHOICE_NONE = [("", "None")]
 
 # prepares a value for use in a LIKE query
-prep_for_like_query = lambda x: str(x).replace("%", "\%").replace("_", "\_")
+prep_for_like_query = lambda x: str(x).replace("\\", "\\\\").replace("%", "\%").replace("_", "\_")
 
 # returns the <ul> class for a given radio_admin value
 get_ul_class = lambda x: 'radiolist%s' % ((x == HORIZONTAL) and ' inline' or '')
@@ -65,7 +66,7 @@
 
     def __init__(self, verbose_name=None, name=None, primary_key=False,
         maxlength=None, unique=False, blank=False, null=False, db_index=False,
-        core=False, rel=None, default=NOT_PROVIDED, editable=True,
+        core=False, rel=None, default=NOT_PROVIDED, editable=True, serialize=True,
         prepopulate_from=None, unique_for_date=None, unique_for_month=None,
         unique_for_year=None, validator_list=None, choices=None, radio_admin=None,
         help_text='', db_column=None):
@@ -76,11 +77,12 @@
         self.blank, self.null = blank, null
         self.core, self.rel, self.default = core, rel, default
         self.editable = editable
+        self.serialize = serialize
         self.validator_list = validator_list or []
         self.prepopulate_from = prepopulate_from
         self.unique_for_date, self.unique_for_month = unique_for_date, unique_for_month
         self.unique_for_year = unique_for_year
-        self.choices = choices or []
+        self._choices = choices or []
         self.radio_admin = radio_admin
         self.help_text = help_text
         self.db_column = db_column
@@ -162,7 +164,7 @@
 
     def get_db_prep_lookup(self, lookup_type, value):
         "Returns field's value prepared for database lookup."
-        if lookup_type in ('exact', 'gt', 'gte', 'lt', 'lte', 'year', 'month', 'day', 'search'):
+        if lookup_type in ('exact', 'gt', 'gte', 'lt', 'lte', 'month', 'day', 'search'):
             return [value]
         elif lookup_type in ('range', 'in'):
             return value
@@ -176,7 +178,13 @@
             return ["%%%s" % prep_for_like_query(value)]
         elif lookup_type == 'isnull':
             return []
-        raise TypeError, "Field has invalid lookup: %s" % lookup_type
+        elif lookup_type == 'year':
+            try:
+                value = int(value)
+            except ValueError:
+                raise ValueError("The __year lookup type requires an integer argument")
+            return ['%s-01-01 00:00:00' % value, '%s-12-31 23:59:59.999999' % value]
+        raise TypeError("Field has invalid lookup: %s" % lookup_type)
 
     def has_default(self):
         "Returns a boolean of whether this field has a default value."
@@ -289,8 +297,11 @@
         if self.choices:
             return first_choice + list(self.choices)
         rel_model = self.rel.to
-        return first_choice + [(x._get_pk_val(), str(x))
-                               for x in rel_model._default_manager.complex_filter(self.rel.limit_choices_to)]
+        if hasattr(self.rel, 'get_related_field'):
+            lst = [(getattr(x, self.rel.get_related_field().attname), str(x)) for x in rel_model._default_manager.complex_filter(self.rel.limit_choices_to)]
+        else:
+            lst = [(x._get_pk_val(), str(x)) for x in rel_model._default_manager.complex_filter(self.rel.limit_choices_to)]
+        return first_choice + lst
 
     def get_choices_default(self):
         if self.radio_admin:
@@ -321,6 +332,18 @@
     def bind(self, fieldmapping, original, bound_field_class):
         return bound_field_class(self, fieldmapping, original)
 
+    def _get_choices(self):
+        if hasattr(self._choices, 'next'):
+            choices, self._choices = tee(self._choices)
+            return choices
+        else:
+            return self._choices
+    choices = property(_get_choices)
+
+    def value_from_object(self, obj):
+        "Returns the value of this field in the given model instance."
+        return getattr(obj, self.attname)
+
 class AutoField(Field):
     empty_strings_allowed = False
     def __init__(self, *args, **kwargs):
@@ -364,8 +387,8 @@
 
     def to_python(self, value):
         if value in (True, False): return value
-        if value in ('t', 'True'): return True
-        if value in ('f', 'False'): return False
+        if value in ('t', 'True', '1'): return True
+        if value in ('f', 'False', '0'): return False
         raise validators.ValidationError, gettext("This value must be either True or False.")
 
     def get_manipulator_field_objs(self):
@@ -401,6 +424,8 @@
         Field.__init__(self, verbose_name, name, **kwargs)
 
     def to_python(self, value):
+        if value is None:
+            return value
         if isinstance(value, datetime.datetime):
             return value.date()
         if isinstance(value, datetime.date):
@@ -452,12 +477,14 @@
     def get_manipulator_field_objs(self):
         return [forms.DateField]
 
-    def flatten_data(self, follow, obj = None):
+    def flatten_data(self, follow, obj=None):
         val = self._get_val_from_obj(obj)
         return {self.attname: (val is not None and val.strftime("%Y-%m-%d") or '')}
 
 class DateTimeField(DateField):
     def to_python(self, value):
+        if value is None:
+            return value
         if isinstance(value, datetime.datetime):
             return value
         if isinstance(value, datetime.date):
@@ -564,7 +591,7 @@
         # If the raw path is passed in, validate it's under the MEDIA_ROOT.
         def isWithinMediaRoot(field_data, all_data):
             f = os.path.abspath(os.path.join(settings.MEDIA_ROOT, field_data))
-            if not f.startswith(os.path.normpath(settings.MEDIA_ROOT)):
+            if not f.startswith(os.path.abspath(os.path.normpath(settings.MEDIA_ROOT))):
                 raise validators.ValidationError, _("Enter a valid filename.")
         field_list[1].validator_list.append(isWithinMediaRoot)
         return field_list
@@ -574,7 +601,7 @@
         setattr(cls, 'get_%s_filename' % self.name, curry(cls._get_FIELD_filename, field=self))
         setattr(cls, 'get_%s_url' % self.name, curry(cls._get_FIELD_url, field=self))
         setattr(cls, 'get_%s_size' % self.name, curry(cls._get_FIELD_size, field=self))
-        setattr(cls, 'save_%s_file' % self.name, lambda instance, filename, raw_contents: instance._save_FIELD_file(self, filename, raw_contents))
+        setattr(cls, 'save_%s_file' % self.name, lambda instance, filename, raw_contents, save=True: instance._save_FIELD_file(self, filename, raw_contents, save))
         dispatcher.connect(self.delete_file, signal=signals.post_delete, sender=cls)
 
     def delete_file(self, instance):
@@ -592,14 +619,14 @@
     def get_manipulator_field_names(self, name_prefix):
         return [name_prefix + self.name + '_file', name_prefix + self.name]
 
-    def save_file(self, new_data, new_object, original_object, change, rel):
+    def save_file(self, new_data, new_object, original_object, change, rel, save=True):
         upload_field_name = self.get_manipulator_field_names('')[0]
         if new_data.get(upload_field_name, False):
             func = getattr(new_object, 'save_%s_file' % self.name)
             if rel:
-                func(new_data[upload_field_name][0]["filename"], new_data[upload_field_name][0]["content"])
+                func(new_data[upload_field_name][0]["filename"], new_data[upload_field_name][0]["content"], save)
             else:
-                func(new_data[upload_field_name]["filename"], new_data[upload_field_name]["content"])
+                func(new_data[upload_field_name]["filename"], new_data[upload_field_name]["content"], save)
 
     def get_directory_name(self):
         return os.path.normpath(datetime.datetime.now().strftime(self.upload_to))
@@ -643,12 +670,12 @@
         if not self.height_field:
             setattr(cls, 'get_%s_height' % self.name, curry(cls._get_FIELD_height, field=self))
 
-    def save_file(self, new_data, new_object, original_object, change, rel):
-        FileField.save_file(self, new_data, new_object, original_object, change, rel)
+    def save_file(self, new_data, new_object, original_object, change, rel, save=True):
+        FileField.save_file(self, new_data, new_object, original_object, change, rel, save)
         # If the image has height and/or width field(s) and they haven't
         # changed, set the width and/or height field(s) back to their original
         # values.
-        if change and (self.width_field or self.height_field):
+        if change and (self.width_field or self.height_field) and save:
             if self.width_field:
                 setattr(new_object, self.width_field, getattr(original_object, self.width_field))
             if self.height_field:
@@ -676,6 +703,13 @@
         kwargs['null'] = True
         Field.__init__(self, *args, **kwargs)
 
+    def to_python(self, value):
+        if value in (None, True, False): return value
+        if value in ('None'): return None
+        if value in ('t', 'True', '1'): return True
+        if value in ('f', 'False', '0'): return False
+        raise validators.ValidationError, gettext("This value must be either None, True or False.")
+
     def get_manipulator_field_objs(self):
         return [forms.NullBooleanField]
 
@@ -742,7 +776,7 @@
         if value is not None:
             # MySQL will throw a warning if microseconds are given, because it
             # doesn't support microseconds.
-            if settings.DATABASE_ENGINE == 'mysql':
+            if settings.DATABASE_ENGINE == 'mysql' and hasattr(value, 'microsecond'):
                 value = value.replace(microsecond=0)
             value = str(value)
         return Field.get_db_prep_save(self, value)
@@ -754,15 +788,20 @@
         val = self._get_val_from_obj(obj)
         return {self.attname: (val is not None and val.strftime("%H:%M:%S") or '')}
 
-class URLField(Field):
+class URLField(CharField):
     def __init__(self, verbose_name=None, name=None, verify_exists=True, **kwargs):
+        kwargs['maxlength'] = kwargs.get('maxlength', 200)
         if verify_exists:
             kwargs.setdefault('validator_list', []).append(validators.isExistingURL)
-        Field.__init__(self, verbose_name, name, **kwargs)
+        self.verify_exists = verify_exists
+        CharField.__init__(self, verbose_name, name, **kwargs)
 
     def get_manipulator_field_objs(self):
         return [forms.URLField]
 
+    def get_internal_type(self):
+        return "CharField"
+
 class USStateField(Field):
     def get_manipulator_field_objs(self):
         return [forms.USStateField]

Modified: cs/pythia/trunk/opal/db/models/fields/generic.py
===================================================================
--- cs/pythia/trunk/opal/db/models/fields/generic.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/models/fields/generic.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -94,6 +94,7 @@
         
         kwargs['blank'] = True
         kwargs['editable'] = False
+        kwargs['serialize'] = False
         Field.__init__(self, **kwargs)
 
     def get_manipulator_field_objs(self):
@@ -117,7 +118,7 @@
         return self.object_id_field_name
         
     def m2m_reverse_name(self):
-        return self.model._meta.pk.attname
+        return self.object_id_field_name
 
     def contribute_to_class(self, cls, name):
         super(GenericRelation, self).contribute_to_class(cls, name)

Modified: cs/pythia/trunk/opal/db/models/fields/related.py
===================================================================
--- cs/pythia/trunk/opal/db/models/fields/related.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/models/fields/related.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -2,7 +2,8 @@
 from opal.db.models import signals, get_model
 from opal.db.models.fields import AutoField, Field, IntegerField, get_ul_class
 from opal.db.models.related import RelatedObject
-from opal.utils.translation import gettext_lazy, string_concat
+from opal.utils.text import capfirst
+from opal.utils.translation import gettext_lazy, string_concat, ngettext
 from opal.utils.functional import curry
 from opal.core import validators
 from opal import forms
@@ -25,7 +26,7 @@
     key = (module, name)
     # Has the model already been loaded?
     # If so, resolve the string reference right away
-    model = get_model(rel_cls._meta.app_label,field.rel.to)
+    model = get_model(rel_cls._meta.app_label, field.rel.to, False)
     if model:
         field.rel.to = model
         field.do_related_class(model, rel_cls)
@@ -256,8 +257,7 @@
         # Otherwise, just move the named objects into the set.
         if self.related.field.null:
             manager.clear()
-        for obj in value:
-            manager.add(obj)
+        manager.add(*value)
 
 def create_many_related_manager(superclass):
     """Creates a manager that subclasses 'superclass' (which is a Manager)
@@ -315,28 +315,36 @@
             # join_table: name of the m2m link table
             # source_col_name: the PK colname in join_table for the source object
             # target_col_name: the PK colname in join_table for the target object
-            # *objs - objects to add
+            # *objs - objects to add. Either object instances, or primary keys of object instances.
             from opal.db import connection
 
-            # Add the newly created or already existing objects to the join table.
-            # First find out which items are already added, to avoid adding them twice
-            new_ids = set([obj._get_pk_val() for obj in objs])
-            cursor = connection.cursor()
-            cursor.execute("SELECT %s FROM %s WHERE %s = %%s AND %s IN (%s)" % \
-                (target_col_name, self.join_table, source_col_name,
-                target_col_name, ",".join(['%s'] * len(new_ids))),
-                [self._pk_val] + list(new_ids))
-            if cursor.rowcount is not None and cursor.rowcount != 0:
-                existing_ids = set([row[0] for row in cursor.fetchmany(cursor.rowcount)])
-            else:
-                existing_ids = set()
+            # If there aren't any objects, there is nothing to do.
+            if objs:
+                # Check that all the objects are of the right type
+                new_ids = set()
+                for obj in objs:
+                    if isinstance(obj, self.model):
+                        new_ids.add(obj._get_pk_val())
+                    else:
+                        new_ids.add(obj)
+                # Add the newly created or already existing objects to the join table.
+                # First find out which items are already added, to avoid adding them twice
+                cursor = connection.cursor()
+                cursor.execute("SELECT %s FROM %s WHERE %s = %%s AND %s IN (%s)" % \
+                    (target_col_name, self.join_table, source_col_name,
+                    target_col_name, ",".join(['%s'] * len(new_ids))),
+                    [self._pk_val] + list(new_ids))
+                if cursor.rowcount is not None and cursor.rowcount != 0:
+                    existing_ids = set([row[0] for row in cursor.fetchmany(cursor.rowcount)])
+                else:
+                    existing_ids = set()
 
-            # Add the ones that aren't there already
-            for obj_id in (new_ids - existing_ids):
-                cursor.execute("INSERT INTO %s (%s, %s) VALUES (%%s, %%s)" % \
-                    (self.join_table, source_col_name, target_col_name),
-                    [self._pk_val, obj_id])
-            transaction.commit_unless_managed()
+                # Add the ones that aren't there already
+                for obj_id in (new_ids - existing_ids):
+                    cursor.execute("INSERT INTO %s (%s, %s) VALUES (%%s, %%s)" % \
+                        (self.join_table, source_col_name, target_col_name),
+                        [self._pk_val, obj_id])
+                transaction.commit_unless_managed()
 
         def _remove_items(self, source_col_name, target_col_name, *objs):
             # source_col_name: the PK colname in join_table for the source object
@@ -344,16 +352,22 @@
             # *objs - objects to remove
             from opal.db import connection
 
-            for obj in objs:
-                if not isinstance(obj, self.model):
-                    raise ValueError, "objects to remove() must be %s instances" % self.model._meta.object_name
-            # Remove the specified objects from the join table
-            cursor = connection.cursor()
-            for obj in objs:
-                cursor.execute("DELETE FROM %s WHERE %s = %%s AND %s = %%s" % \
-                    (self.join_table, source_col_name, target_col_name),
-                    [self._pk_val, obj._get_pk_val()])
-            transaction.commit_unless_managed()
+            # If there aren't any objects, there is nothing to do.
+            if objs:
+                # Check that all the objects are of the right type
+                old_ids = set()
+                for obj in objs:
+                    if isinstance(obj, self.model):
+                        old_ids.add(obj._get_pk_val())
+                    else:
+                        old_ids.add(obj)
+                # Remove the specified objects from the join table
+                cursor = connection.cursor()
+                cursor.execute("DELETE FROM %s WHERE %s = %%s AND %s IN (%s)" % \
+                    (self.join_table, source_col_name,
+                    target_col_name, ",".join(['%s'] * len(old_ids))),
+                    [self._pk_val] + list(old_ids))
+                transaction.commit_unless_managed()
 
         def _clear_items(self, source_col_name):
             # source_col_name: the PK colname in join_table for the source object
@@ -405,8 +419,7 @@
 
         manager = self.__get__(instance)
         manager.clear()
-        for obj in value:
-            manager.add(obj)
+        manager.add(*value)
 
 class ReverseManyRelatedObjectsDescriptor(object):
     # This class provides the functionality that makes the related-object
@@ -447,8 +460,7 @@
 
         manager = self.__get__(instance)
         manager.clear()
-        for obj in value:
-            manager.add(obj)
+        manager.add(*value)
 
 class ForeignKey(RelatedField, Field):
     empty_strings_allowed = False
@@ -610,6 +622,7 @@
             limit_choices_to=kwargs.pop('limit_choices_to', None),
             raw_id_admin=kwargs.pop('raw_id_admin', False),
             symmetrical=kwargs.pop('symmetrical', True))
+        self.db_table = kwargs.pop('db_table', None)
         if kwargs["rel"].raw_id_admin:
             kwargs.setdefault("validator_list", []).append(self.isValidIDList)
         Field.__init__(self, **kwargs)
@@ -618,7 +631,7 @@
             msg = gettext_lazy('Separate multiple IDs with commas.')
         else:
             msg = gettext_lazy('Hold down "Control", or "Command" on a Mac, to select more than one.')
-        self.help_text = string_concat(self.help_text, msg)
+        self.help_text = string_concat(self.help_text, ' ', msg)
 
     def get_manipulator_field_objs(self):
         if self.rel.raw_id_admin:
@@ -632,7 +645,10 @@
 
     def _get_m2m_db_table(self, opts):
         "Function that can be curried to provide the m2m table name for this relation"
-        return '%s_%s' % (opts.db_table, self.name)
+        if self.db_table:
+            return self.db_table
+        else:
+            return '%s_%s' % (opts.db_table, self.name)
 
     def _get_m2m_column_name(self, related):
         "Function that can be curried to provide the source column name for the m2m table"
@@ -706,6 +722,10 @@
     def set_attributes_from_rel(self):
         pass
 
+    def value_from_object(self, obj):
+        "Returns the value of this field in the given model instance."
+        return getattr(obj, self.attname).all()
+
 class ManyToOneRel(object):
     def __init__(self, to, field_name, num_in_admin=3, min_num_in_admin=None,
         max_num_in_admin=None, num_extra_on_change=1, edit_inline=False,

Modified: cs/pythia/trunk/opal/db/models/loading.py
===================================================================
--- cs/pythia/trunk/opal/db/models/loading.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/models/loading.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -32,7 +32,7 @@
                 _app_errors[app_name] = e
     return _app_list
 
-def get_app(app_label, emptyOK = False):
+def get_app(app_label, emptyOK=False):
     "Returns the module containing the models for the given app_label. If the app has no models in it and 'emptyOK' is True, returns None."
     get_apps() # Run get_apps() to populate the _app_list cache. Slightly hackish.
     for app_name in settings.INSTALLED_APPS:
@@ -48,7 +48,7 @@
 def load_app(app_name):
     "Loads the app with the provided fully qualified name, and returns the model module."
     global _app_list
-    mod = __import__(app_name, '', '', ['models'])
+    mod = __import__(app_name, {}, {}, ['models'])
     if not hasattr(mod, 'models'):
         return None
     if mod.models not in _app_list:
@@ -75,11 +75,15 @@
             model_list.extend(get_models(app_mod))
         return model_list
 
-def get_model(app_label, model_name):
+def get_model(app_label, model_name, seed_cache=True):
     """
-    Returns the model matching the given app_label and case-insensitive model_name.
+    Returns the model matching the given app_label and case-insensitive
+    model_name.
+
     Returns None if no model is found.
     """
+    if seed_cache:
+        get_apps()
     try:
         model_dict = _app_models[app_label]
     except KeyError:

Modified: cs/pythia/trunk/opal/db/models/manager.py
===================================================================
--- cs/pythia/trunk/opal/db/models/manager.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/models/manager.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -1,4 +1,4 @@
-from opal.db.models.query import QuerySet
+from opal.db.models.query import QuerySet, EmptyQuerySet
 from opal.dispatch import dispatcher
 from opal.db.models import signals
 from opal.db.models.fields import FieldDoesNotExist
@@ -41,12 +41,18 @@
     #######################
     # PROXIES TO QUERYSET #
     #######################
+    
+    def get_empty_query_set(self):
+        return EmptyQuerySet(self.model)
 
     def get_query_set(self):
         """Returns a new QuerySet object.  Subclasses can override this method
         to easily customise the behaviour of the Manager.
         """
         return QuerySet(self.model)
+    
+    def none(self):
+        return self.get_empty_query_set()
 
     def all(self):
         return self.get_query_set()

Modified: cs/pythia/trunk/opal/db/models/manipulators.py
===================================================================
--- cs/pythia/trunk/opal/db/models/manipulators.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/models/manipulators.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -96,15 +96,17 @@
         if self.change:
             params[self.opts.pk.attname] = self.obj_key
 
-        # First, save the basic object itself.
+        # First, create the basic object itself.
         new_object = self.model(**params)
-        new_object.save()
 
-        # Now that the object's been saved, save any uploaded files.
+        # Now that the object's been created, save any uploaded files.
         for f in self.opts.fields:
             if isinstance(f, FileField):
-                f.save_file(new_data, new_object, self.change and self.original_object or None, self.change, rel=False)
+                f.save_file(new_data, new_object, self.change and self.original_object or None, self.change, rel=False, save=False)
 
+        # Now save the object
+        new_object.save()
+
         # Calculate which primary fields have changed.
         if self.change:
             self.fields_added, self.fields_changed, self.fields_deleted = [], [], []
@@ -138,7 +140,7 @@
             child_follow = self.follow.get(related.name, None)
 
             if child_follow:
-                obj_list = expanded_data[related.var_name].items()
+                obj_list = expanded_data.get(related.var_name, {}).items()
                 if not obj_list:
                     continue
 
@@ -177,7 +179,7 @@
                         # case, because they'll be dealt with later.
 
                         if f == related.field:
-                            param = getattr(new_object, related.field.rel.field_name)
+                            param = getattr(new_object, related.field.rel.get_related_field().attname)
                         elif (not self.change) and isinstance(f, AutoField):
                             param = None
                         elif self.change and (isinstance(f, FileField) or not child_follow.get(f.name, None)):
@@ -215,8 +217,11 @@
                         # Save many-to-many objects.
                         for f in related.opts.many_to_many:
                             if child_follow.get(f.name, None) and not f.rel.edit_inline:
-                                was_changed = getattr(new_rel_obj, 'set_%s' % f.name)(rel_new_data[f.attname])
-                                if self.change and was_changed:
+                                new_value = rel_new_data[f.attname]
+                                if f.rel.raw_id_admin:
+                                    new_value = new_value[0]
+                                setattr(new_rel_obj, f.name, f.rel.to.objects.filter(pk__in=new_value))
+                                if self.change:
                                     self.fields_changed.append('%s for %s "%s"' % (f.verbose_name, related.opts.verbose_name, new_rel_obj))
 
                     # If, in the change stage, all of the core fields were blank and
@@ -283,7 +288,7 @@
         # This is really not going to work for fields that have different
         # form fields, e.g. DateTime.
         # This validation needs to occur after html2python to be effective.
-        field_val = all_data.get(f.attname, None)
+        field_val = all_data.get(f.name, None)
         if field_val is None:
             # This will be caught by another validator, assuming the field
             # doesn't have blank=True.
@@ -300,7 +305,7 @@
         pass
     else:
         raise validators.ValidationError, _("%(object)s with this %(type)s already exists for the given %(field)s.") % \
-            {'object': capfirst(opts.verbose_name), 'type': field_list[0].verbose_name, 'field': get_text_list(field_name_list[1:], 'and')}
+            {'object': capfirst(opts.verbose_name), 'type': field_list[0].verbose_name, 'field': get_text_list([f.verbose_name for f in field_list[1:]], _('and'))}
 
 def manipulator_validator_unique_for_date(from_field, date_field, opts, lookup_type, self, field_data, all_data):
     from opal.db.models.fields.related import ManyToOneRel

Modified: cs/pythia/trunk/opal/db/models/options.py
===================================================================
--- cs/pythia/trunk/opal/db/models/options.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/models/options.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -84,6 +84,7 @@
             self.fields.insert(bisect(self.fields, field), field)
             if not self.pk and field.primary_key:
                 self.pk = field
+                field.serialize = False
 
     def __repr__(self):
         return '<Options for %s>' % self.object_name

Modified: cs/pythia/trunk/opal/db/models/query.py
===================================================================
--- cs/pythia/trunk/opal/db/models/query.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/models/query.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -1,5 +1,6 @@
 from opal.db import backend, connection, transaction
 from opal.db.models.fields import DateField, FieldDoesNotExist
+from opal.db.models.fields.generic import GenericRelation
 from opal.db.models import signals
 from opal.dispatch import dispatcher
 from opal.utils.datastructures import SortedDict
@@ -25,6 +26,9 @@
 # Larger values are slightly faster at the expense of more storage space.
 GET_ITERATOR_CHUNK_SIZE = 100
 
+class EmptyResultSet(Exception):
+    pass
+
 ####################
 # HELPER FUNCTIONS #
 ####################
@@ -80,6 +84,7 @@
         self._filters = Q()
         self._order_by = None        # Ordering, e.g. ('date', '-name'). If None, use model's ordering.
         self._select_related = False # Whether to fill cache for related objects.
+        self._max_related_depth = 0  # Maximum "depth" for select_related
         self._distinct = False       # Whether the query should use SELECT DISTINCT.
         self._select = {}            # Dictionary of attname -> SQL.
         self._where = []             # List of extra WHERE clauses to use.
@@ -104,6 +109,8 @@
 
     def __getitem__(self, k):
         "Retrieve an item or slice from the set of results."
+        if not isinstance(k, (slice, int)):
+            raise TypeError
         assert (not isinstance(k, slice) and (k >= 0)) \
             or (isinstance(k, slice) and (k.start is None or k.start >= 0) and (k.stop is None or k.stop >= 0)), \
             "Negative indexing is not supported."
@@ -163,12 +170,16 @@
 
     def iterator(self):
         "Performs the SELECT database lookup of this QuerySet."
+        try:
+            select, sql, params = self._get_sql_clause()
+        except EmptyResultSet:
+            raise StopIteration
+
         # self._select is a dictionary, and dictionaries' key order is
         # undefined, so we convert it to a list of tuples.
         extra_select = self._select.items()
 
         cursor = connection.cursor()
-        select, sql, params = self._get_sql_clause()
         cursor.execute("SELECT " + (self._distinct and "DISTINCT " or "") + ",".join(select) + sql, params)
         fill_cache = self._select_related
         index_end = len(self.model._meta.fields)
@@ -178,7 +189,8 @@
                 raise StopIteration
             for row in rows:
                 if fill_cache:
-                    obj, index_end = get_cached_row(self.model, row, 0)
+                    obj, index_end = get_cached_row(klass=self.model, row=row, 
+                                                    index_start=0, max_depth=self._max_related_depth)
                 else:
                     obj = self.model(*row[:index_end])
                 for i, k in enumerate(extra_select):
@@ -186,13 +198,31 @@
                 yield obj
 
     def count(self):
-        "Performs a SELECT COUNT() and returns the number of records as an integer."
+        """
+        Performs a SELECT COUNT() and returns the number of records as an
+        integer.
+        
+        If the queryset is already cached (i.e. self._result_cache is set) this
+        simply returns the length of the cached results set to avoid multiple
+        SELECT COUNT(*) calls.
+        """
+        if self._result_cache is not None:
+            return len(self._result_cache)
+            
         counter = self._clone()
         counter._order_by = ()
+        counter._select_related = False
+
+        offset = counter._offset
+        limit = counter._limit
         counter._offset = None
         counter._limit = None
-        counter._select_related = False
-        select, sql, params = counter._get_sql_clause()
+
+        try:
+            select, sql, params = counter._get_sql_clause()
+        except EmptyResultSet:
+            return 0
+
         cursor = connection.cursor()
         if self._distinct:
             id_col = "%s.%s" % (backend.quote_name(self.model._meta.db_table),
@@ -200,8 +230,17 @@
             cursor.execute("SELECT COUNT(DISTINCT(%s))" % id_col + sql, params)
         else:
             cursor.execute("SELECT COUNT(*)" + sql, params)
-        return cursor.fetchone()[0]
+        count = cursor.fetchone()[0]
 
+        # Apply any offset and limit constraints manually, since using LIMIT or
+        # OFFSET in SQL doesn't change the output of COUNT.
+        if offset:
+            count = max(0, count - offset)
+        if limit:
+            count = min(limit, count)
+
+        return count
+
     def get(self, *args, **kwargs):
         "Performs the SELECT and returns a single object matching the given keyword arguments."
         clone = self.filter(*args, **kwargs)
@@ -359,9 +398,9 @@
         else:
             return self._filter_or_exclude(None, **filter_obj)
 
-    def select_related(self, true_or_false=True):
+    def select_related(self, true_or_false=True, depth=0):
         "Returns a new QuerySet instance with '_select_related' modified."
-        return self._clone(_select_related=true_or_false)
+        return self._clone(_select_related=true_or_false, _max_related_depth=depth)
 
     def order_by(self, *field_names):
         "Returns a new QuerySet instance with the ordering changed."
@@ -395,6 +434,7 @@
         c._filters = self._filters
         c._order_by = self._order_by
         c._select_related = self._select_related
+        c._max_related_depth = self._max_related_depth
         c._distinct = self._distinct
         c._select = self._select.copy()
         c._where = self._where[:]
@@ -448,7 +488,10 @@
 
         # Add additional tables and WHERE clauses based on select_related.
         if self._select_related:
-            fill_table_cache(opts, select, tables, where, opts.db_table, [opts.db_table])
+            fill_table_cache(opts, select, tables, where, 
+                             old_prefix=opts.db_table, 
+                             cache_tables_seen=[opts.db_table], 
+                             max_depth=self._max_related_depth)
 
         # Add any additional SELECTs.
         if self._select:
@@ -509,11 +552,18 @@
         return select, " ".join(sql), params
 
 class ValuesQuerySet(QuerySet):
-    def iterator(self):
+    def __init__(self, *args, **kwargs):
+        super(ValuesQuerySet, self).__init__(*args, **kwargs)
         # select_related and select aren't supported in values().
         self._select_related = False
         self._select = {}
 
+    def iterator(self):
+        try:
+            select, sql, params = self._get_sql_clause()
+        except EmptyResultSet:
+            raise StopIteration
+
         # self._fields is a list of field names to fetch.
         if self._fields:
             columns = [self.model._meta.get_field(f, many_to_many=False).column for f in self._fields]
@@ -522,9 +572,8 @@
             columns = [f.column for f in self.model._meta.fields]
             field_names = [f.attname for f in self.model._meta.fields]
 
+        select = ['%s.%s' % (backend.quote_name(self.model._meta.db_table), backend.quote_name(c)) for c in columns]
         cursor = connection.cursor()
-        select, sql, params = self._get_sql_clause()
-        select = ['%s.%s' % (backend.quote_name(self.model._meta.db_table), backend.quote_name(c)) for c in columns]
         cursor.execute("SELECT " + (self._distinct and "DISTINCT " or "") + ",".join(select) + sql, params)
         while 1:
             rows = cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE)
@@ -545,7 +594,12 @@
         if self._field.null:
             self._where.append('%s.%s IS NOT NULL' % \
                 (backend.quote_name(self.model._meta.db_table), backend.quote_name(self._field.column)))
-        select, sql, params = self._get_sql_clause()
+
+        try:
+            select, sql, params = self._get_sql_clause()
+        except EmptyResultSet:
+            raise StopIteration
+
         sql = 'SELECT %s %s GROUP BY 1 ORDER BY 1 %s' % \
             (backend.get_date_trunc_sql(self._kind, '%s.%s' % (backend.quote_name(self.model._meta.db_table),
             backend.quote_name(self._field.column))), sql, self._order)
@@ -563,6 +617,25 @@
         c._order = self._order
         return c
 
+class EmptyQuerySet(QuerySet):
+    def __init__(self, model=None):
+        super(EmptyQuerySet, self).__init__(model)
+        self._result_cache = []
+
+    def count(self):
+        return 0
+
+    def delete(self):
+        pass
+
+    def _clone(self, klass=None, **kwargs):
+        c = super(EmptyQuerySet, self)._clone(klass, **kwargs)
+        c._result_cache = []
+        return c
+
+    def _get_sql_clause(self):
+        raise EmptyResultSet
+
 class QOperator(object):
     "Base class for QAnd and QOr"
     def __init__(self, *args):
@@ -571,10 +644,14 @@
     def get_sql(self, opts):
         joins, where, params = SortedDict(), [], []
         for val in self.args:
-            joins2, where2, params2 = val.get_sql(opts)
-            joins.update(joins2)
-            where.extend(where2)
-            params.extend(params2)
+            try:
+                joins2, where2, params2 = val.get_sql(opts)
+                joins.update(joins2)
+                where.extend(where2)
+                params.extend(params2)
+            except EmptyResultSet:
+                if not isinstance(self, QOr):
+                    raise EmptyResultSet
         if where:
             return joins, ['(%s)' % self.operator.join(where)], params
         return joins, [], params
@@ -628,8 +705,11 @@
         self.q = q
 
     def get_sql(self, opts):
-        joins, where, params = self.q.get_sql(opts)
-        where2 = ['(NOT (%s))' % " AND ".join(where)]
+        try:
+            joins, where, params = self.q.get_sql(opts)
+            where2 = ['(NOT (%s))' % " AND ".join(where)]
+        except EmptyResultSet:
+            return SortedDict(), [], []
         return joins, where2, params
 
 def get_where_clause(lookup_type, table_prefix, field_name, value):
@@ -641,10 +721,14 @@
     except KeyError:
         pass
     if lookup_type == 'in':
-        return '%s%s IN (%s)' % (table_prefix, field_name, ','.join(['%s' for v in value]))
-    elif lookup_type == 'range':
+        in_string = ','.join(['%s' for id in value])
+        if in_string:
+            return '%s%s IN (%s)' % (table_prefix, field_name, in_string)
+        else:
+            raise EmptyResultSet
+    elif lookup_type in ('range', 'year'):
         return '%s%s BETWEEN %%s AND %%s' % (table_prefix, field_name)
-    elif lookup_type in ('year', 'month', 'day'):
+    elif lookup_type in ('month', 'day'):
         return "%s = %%s" % backend.get_date_extract_sql(lookup_type, table_prefix + field_name)
     elif lookup_type == 'isnull':
         return "%s%s IS %sNULL" % (table_prefix, field_name, (not value and 'NOT ' or ''))
@@ -652,21 +736,33 @@
         return backend.get_fulltext_search_sql(table_prefix + field_name)
     raise TypeError, "Got invalid lookup_type: %s" % repr(lookup_type)
 
-def get_cached_row(klass, row, index_start):
-    "Helper function that recursively returns an object with cache filled"
+def get_cached_row(klass, row, index_start, max_depth=0, cur_depth=0):
+    """Helper function that recursively returns an object with cache filled"""
+    
+    # If we've got a max_depth set and we've exceeded that depth, bail now.
+    if max_depth and cur_depth > max_depth:
+        return None
+    
     index_end = index_start + len(klass._meta.fields)
     obj = klass(*row[index_start:index_end])
     for f in klass._meta.fields:
         if f.rel and not f.null:
-            rel_obj, index_end = get_cached_row(f.rel.to, row, index_end)
-            setattr(obj, f.get_cache_name(), rel_obj)
+            cached_row = get_cached_row(f.rel.to, row, index_end, max_depth, cur_depth+1)
+            if cached_row:
+                rel_obj, index_end = cached_row
+                setattr(obj, f.get_cache_name(), rel_obj)
     return obj, index_end
 
-def fill_table_cache(opts, select, tables, where, old_prefix, cache_tables_seen):
+def fill_table_cache(opts, select, tables, where, old_prefix, cache_tables_seen, max_depth=0, cur_depth=0):
     """
     Helper function that recursively populates the select, tables and where (in
     place) for select_related queries.
     """
+    
+    # If we've got a max_depth set and we've exceeded that depth, bail now.
+    if max_depth and cur_depth > max_depth:
+        return None
+    
     qn = backend.quote_name
     for f in opts.fields:
         if f.rel and not f.null:
@@ -681,12 +777,12 @@
             where.append('%s.%s = %s.%s' % \
                 (qn(old_prefix), qn(f.column), qn(db_table), qn(f.rel.get_related_field().column)))
             select.extend(['%s.%s' % (qn(db_table), qn(f2.column)) for f2 in f.rel.to._meta.fields])
-            fill_table_cache(f.rel.to._meta, select, tables, where, db_table, cache_tables_seen)
+            fill_table_cache(f.rel.to._meta, select, tables, where, db_table, cache_tables_seen, max_depth, cur_depth+1)
 
 def parse_lookup(kwarg_items, opts):
     # Helper function that handles converting API kwargs
     # (e.g. "name__exact": "tom") to SQL.
-    # Returns a tuple of (tables, joins, where, params).
+    # Returns a tuple of (joins, where, params).
 
     # 'joins' is a sorted dictionary describing the tables that must be joined
     # to complete the query. The dictionary is sorted because creation order
@@ -707,34 +803,35 @@
     joins, where, params = SortedDict(), [], []
 
     for kwarg, value in kwarg_items:
-        if value is not None:
-            path = kwarg.split(LOOKUP_SEPARATOR)
-            # Extract the last elements of the kwarg.
-            # The very-last is the lookup_type (equals, like, etc).
-            # The second-last is the table column on which the lookup_type is
-            # to be performed.
-            # The exceptions to this are:
-            # 1)  "pk", which is an implicit id__exact;
-            #     if we find "pk", make the lookup_type "exact', and insert
-            #     a dummy name of None, which we will replace when
-            #     we know which table column to grab as the primary key.
-            # 2)  If there is only one part, or the last part is not a query
-            #     term, assume that the query is an __exact
-            lookup_type = path.pop()
-            if lookup_type == 'pk':
-                lookup_type = 'exact'
-                path.append(None)
-            elif len(path) == 0 or lookup_type not in QUERY_TERMS:
-                path.append(lookup_type)
-                lookup_type = 'exact'
+        path = kwarg.split(LOOKUP_SEPARATOR)
+        # Extract the last elements of the kwarg.
+        # The very-last is the lookup_type (equals, like, etc).
+        # The second-last is the table column on which the lookup_type is
+        # to be performed. If this name is 'pk', it will be substituted with
+        # the name of the primary key.
+        # If there is only one part, or the last part is not a query
+        # term, assume that the query is an __exact
+        lookup_type = path.pop()
+        if lookup_type == 'pk':
+            lookup_type = 'exact'
+            path.append(None)
+        elif len(path) == 0 or lookup_type not in QUERY_TERMS:
+            path.append(lookup_type)
+            lookup_type = 'exact'
 
-            if len(path) < 1:
-                raise TypeError, "Cannot parse keyword query %r" % kwarg
+        if len(path) < 1:
+            raise TypeError, "Cannot parse keyword query %r" % kwarg
 
-            joins2, where2, params2 = lookup_inner(path, lookup_type, value, opts, opts.db_table, None)
-            joins.update(joins2)
-            where.extend(where2)
-            params.extend(params2)
+        if value is None:
+            # Interpret '__exact=None' as the sql '= NULL'; otherwise, reject
+            # all uses of None as a query value.
+            if lookup_type != 'exact':
+                raise ValueError, "Cannot use None as a query value"
+
+        joins2, where2, params2 = lookup_inner(path, lookup_type, value, opts, opts.db_table, None)
+        joins.update(joins2)
+        where.extend(where2)
+        params.extend(params2)
     return joins, where, params
 
 class FieldFound(Exception):
@@ -766,7 +863,7 @@
     name = path.pop(0)
     # Has the primary key been requested? If so, expand it out
     # to be the name of the current class' primary key
-    if name is None:
+    if name is None or name == 'pk':
         name = current_opts.pk.name
 
     # Try to find the name in the fields associated with the current class
@@ -826,9 +923,15 @@
                 new_opts = field.rel.to._meta
                 new_column = new_opts.pk.column
                 join_column = field.column
+                raise FieldFound
+            elif path:
+                # For regular fields, if there are still items on the path,
+                # an error has been made. We munge "name" so that the error
+                # properly identifies the cause of the problem.
+                name += LOOKUP_SEPARATOR + path[0]
+            else:
+                raise FieldFound
 
-            raise FieldFound
-
     except FieldFound: # Match found, loop has been shortcut.
         pass
     else: # No match found.
@@ -925,18 +1028,26 @@
 
         pk_list = [pk for pk,instance in seen_objs[cls]]
         for related in cls._meta.get_all_related_many_to_many_objects():
-            for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
-                cursor.execute("DELETE FROM %s WHERE %s IN (%s)" % \
-                    (qn(related.field.m2m_db_table()),
-                        qn(related.field.m2m_reverse_name()),
-                        ','.join(['%s' for pk in pk_list[offset:offset+GET_ITERATOR_CHUNK_SIZE]])),
-                    pk_list[offset:offset+GET_ITERATOR_CHUNK_SIZE])
+            if not isinstance(related.field, GenericRelation):
+                for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
+                    cursor.execute("DELETE FROM %s WHERE %s IN (%s)" % \
+                        (qn(related.field.m2m_db_table()),
+                            qn(related.field.m2m_reverse_name()),
+                            ','.join(['%s' for pk in pk_list[offset:offset+GET_ITERATOR_CHUNK_SIZE]])),
+                        pk_list[offset:offset+GET_ITERATOR_CHUNK_SIZE])
         for f in cls._meta.many_to_many:
+            if isinstance(f, GenericRelation):
+                from opal.contrib.contenttypes.models import ContentType
+                query_extra = 'AND %s=%%s' % f.rel.to._meta.get_field(f.content_type_field_name).column
+                args_extra = [ContentType.objects.get_for_model(cls).id]
+            else:
+                query_extra = ''
+                args_extra = []
             for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
-                cursor.execute("DELETE FROM %s WHERE %s IN (%s)" % \
+                cursor.execute(("DELETE FROM %s WHERE %s IN (%s)" % \
                     (qn(f.m2m_db_table()), qn(f.m2m_column_name()),
-                    ','.join(['%s' for pk in pk_list[offset:offset+GET_ITERATOR_CHUNK_SIZE]])),
-                    pk_list[offset:offset+GET_ITERATOR_CHUNK_SIZE])
+                    ','.join(['%s' for pk in pk_list[offset:offset+GET_ITERATOR_CHUNK_SIZE]]))) + query_extra,
+                    pk_list[offset:offset+GET_ITERATOR_CHUNK_SIZE] + args_extra)
         for field in cls._meta.fields:
             if field.rel and field.null and field.rel.to in seen_objs:
                 for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):

Modified: cs/pythia/trunk/opal/db/models/related.py
===================================================================
--- cs/pythia/trunk/opal/db/models/related.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/db/models/related.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -1,7 +1,7 @@
 class BoundRelatedObject(object):
     def __init__(self, related_object, field_mapping, original):
         self.relation = related_object
-        self.field_mappings = field_mapping[related_object.opts.module_name]
+        self.field_mappings = field_mapping[related_object.name]
 
     def template_name(self):
         raise NotImplementedError
@@ -16,7 +16,7 @@
         self.opts = model._meta
         self.field = field
         self.edit_inline = field.rel.edit_inline
-        self.name = self.opts.module_name
+        self.name = '%s:%s' % (self.opts.app_label, self.opts.module_name)
         self.var_name = self.opts.object_name.lower()
 
     def flatten_data(self, follow, obj=None):
@@ -68,7 +68,10 @@
                 # object
                 return [attr]
         else:
-            return [None] * self.field.rel.num_in_admin
+            if self.field.rel.min_num_in_admin:
+                return [None] * max(self.field.rel.num_in_admin, self.field.rel.min_num_in_admin)
+            else:
+                return [None] * self.field.rel.num_in_admin
 
     def get_db_prep_lookup(self, lookup_type, value):
         # Defer to the actual field definition for db prep
@@ -101,12 +104,12 @@
                 attr = getattr(manipulator.original_object, self.get_accessor_name())
                 count = attr.count()
                 count += self.field.rel.num_extra_on_change
-                if self.field.rel.min_num_in_admin:
-                    count = max(count, self.field.rel.min_num_in_admin)
-                if self.field.rel.max_num_in_admin:
-                    count = min(count, self.field.rel.max_num_in_admin)
             else:
                 count = self.field.rel.num_in_admin
+            if self.field.rel.min_num_in_admin:
+                count = max(count, self.field.rel.min_num_in_admin)
+            if self.field.rel.max_num_in_admin:
+                count = min(count, self.field.rel.max_num_in_admin)
         else:
             count = 1
 
@@ -131,6 +134,9 @@
         # many-to-many objects. It uses the lower-cased object_name + "_set",
         # but this can be overridden with the "related_name" option.
         if self.field.rel.multiple:
+            # If this is a symmetrical m2m relation on self, there is no reverse accessor.
+            if getattr(self.field.rel, 'symmetrical', False) and self.model == self.parent_model:
+                return None
             return self.field.rel.related_name or (self.opts.object_name.lower() + '_set')
         else:
             return self.field.rel.related_name or (self.opts.object_name.lower())

Modified: cs/pythia/trunk/opal/dispatch/dispatcher.py
===================================================================
--- cs/pythia/trunk/opal/dispatch/dispatcher.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/dispatch/dispatcher.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -25,7 +25,6 @@
         deletion, (considerably speeds up the cleanup process
         vs. the original code.)
 """
-from __future__ import generators
 import types, weakref
 from opal.dispatch import saferef, robustapply, errors
 
@@ -33,11 +32,6 @@
 __cvsid__ = "$Id: dispatcher.py,v 1.9 2005/09/17 04:55:57 mcfletch Exp $"
 __version__ = "$Revision: 1.9 $"[11:-2]
 
-try:
-    True
-except NameError:
-    True = 1==1
-    False = 1==0
 
 class _Parameter:
     """Used to represent default parameter values."""
@@ -140,10 +134,9 @@
     if weak:
         receiver = saferef.safeRef(receiver, onDelete=_removeReceiver)
     senderkey = id(sender)
-    if connections.has_key(senderkey):
-        signals = connections[senderkey]
-    else:
-        connections[senderkey] = signals = {}
+
+    signals = connections.setdefault(senderkey, {})
+
     # Keep track of senders for cleanup.
     # Is Anonymous something we want to clean up?
     if sender not in (None, Anonymous, Any):
@@ -251,10 +244,10 @@
     to retrieve the actual receiver objects as an iterable
     object.
     """
-    try:
-        return connections[id(sender)][signal]
-    except KeyError:
-        return []
+    existing = connections.get(id(sender))
+    if existing is not None:
+        return existing.get(signal, [])
+    return []
 
 def liveReceivers(receivers):
     """Filter sequence of receivers to get resolved, live receivers
@@ -278,31 +271,49 @@
 def getAllReceivers( sender = Any, signal = Any ):
     """Get list of all receivers from global tables
 
-    This gets all receivers which should receive
+    This gets all dereferenced receivers which should receive
     the given signal from sender, each receiver should
     be produced only once by the resulting generator
     """
     receivers = {}
-    for set in (
-        # Get receivers that receive *this* signal from *this* sender.
-        getReceivers( sender, signal ),
-        # Add receivers that receive *any* signal from *this* sender.
-        getReceivers( sender, Any ),
-        # Add receivers that receive *this* signal from *any* sender.
-        getReceivers( Any, signal ),
-        # Add receivers that receive *any* signal from *any* sender.
-        getReceivers( Any, Any ),
-    ):
-        for receiver in set:
-            if receiver: # filter out dead instance-method weakrefs
-                try:
-                    if not receivers.has_key( receiver ):
-                        receivers[receiver] = 1
-                        yield receiver
-                except TypeError:
-                    # dead weakrefs raise TypeError on hash...
-                    pass
+    # Get receivers that receive *this* signal from *this* sender.
+    # Add receivers that receive *any* signal from *this* sender.
+    # Add receivers that receive *this* signal from *any* sender.
+    # Add receivers that receive *any* signal from *any* sender.
+    l = []
+    i = id(sender)
+    if i in connections:
+        sender_receivers = connections[i]
+        if signal in sender_receivers:
+            l.extend(sender_receivers[signal])
+        if signal is not Any and Any in sender_receivers:
+            l.extend(sender_receivers[Any])
 
+    if sender is not Any:
+        i = id(Any)
+        if i in connections:
+            sender_receivers = connections[i]
+            if sender_receivers is not None:
+                if signal in sender_receivers:
+                    l.extend(sender_receivers[signal])
+                if signal is not Any and Any in sender_receivers:
+                    l.extend(sender_receivers[Any])
+
+    for receiver in l:
+        try:
+            if not receiver in receivers:
+                if isinstance(receiver, WEAKREF_TYPES):
+                    receiver = receiver()
+                    # This should only be possible if dereferencing the
+                    # weakref somehow triggered cleanup of the receiver.
+                    if receiver is None:
+                        continue
+                receivers[receiver] = 1
+                yield receiver
+        except TypeError:
+            # dead weakrefs raise TypeError on hash...
+            pass
+
 def send(signal=Any, sender=Anonymous, *arguments, **named):
     """Send signal from sender to all connected receivers.
     
@@ -340,7 +351,7 @@
     # Call each receiver with whatever arguments it can accept.
     # Return a list of tuple pairs [(receiver, response), ... ].
     responses = []
-    for receiver in liveReceivers(getAllReceivers(sender, signal)):
+    for receiver in getAllReceivers(sender, signal):
         response = robustapply.robustApply(
             receiver,
             signal=signal,
@@ -350,6 +361,8 @@
         )
         responses.append((receiver, response))
     return responses
+
+
 def sendExact( signal=Any, sender=Anonymous, *arguments, **named ):
     """Send signal only to those receivers registered for exact message
 
@@ -421,33 +434,18 @@
 def _removeSender(senderkey):
     """Remove senderkey from connections."""
     _removeBackrefs(senderkey)
-    try:
-        del connections[senderkey]
-    except KeyError:
-        pass
-    # Senderkey will only be in senders dictionary if sender 
-    # could be weakly referenced.
-    try: 
-        del senders[senderkey]
-    except: 
-        pass
 
+    connections.pop(senderkey, None)
+    senders.pop(senderkey, None)
 
+
 def _removeBackrefs( senderkey):
     """Remove all back-references to this senderkey"""
-    try:
-        signals = connections[senderkey]
-    except KeyError:
-        signals = None
-    else:
-        items = signals.items()
-        def allReceivers( ):
-            for signal,set in items:
-                for item in set:
-                    yield item
-        for receiver in allReceivers():
+    for receiver_list in connections.pop(senderkey, {}).values():
+        for receiver in receiver_list:
             _killBackref( receiver, senderkey )
 
+
 def _removeOldBackRefs(senderkey, signal, receiver, receivers):
     """Kill old sendersBack references from receiver
 
@@ -483,13 +481,13 @@
 def _killBackref( receiver, senderkey ):
     """Do the actual removal of back reference from receiver to senderkey"""
     receiverkey = id(receiver)
-    set = sendersBack.get( receiverkey, () )
-    while senderkey in set:
+    receivers_list = sendersBack.get( receiverkey, () )
+    while senderkey in receivers_list:
         try:
-            set.remove( senderkey )
+            receivers_list.remove( senderkey )
         except:
             break
-    if not set:
+    if not receivers_list:
         try:
             del sendersBack[ receiverkey ]
         except KeyError:

Modified: cs/pythia/trunk/opal/forms/__init__.py
===================================================================
--- cs/pythia/trunk/opal/forms/__init__.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/forms/__init__.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -2,7 +2,7 @@
 from opal.core.exceptions import PermissionDenied
 from opal.utils.html import escape
 from opal.conf import settings
-from opal.utils.translation import gettext, gettext_lazy, ngettext
+from opal.utils.translation import gettext, ngettext
 
 FORM_FIELD_ID_PREFIX = 'id_'
 
@@ -54,6 +54,7 @@
     def get_validation_errors(self, new_data):
         "Returns dictionary mapping field_names to error-message lists"
         errors = {}
+        self.prepare(new_data)
         for field in self.fields:
             errors.update(field.get_validation_errors(new_data))
             val_name = 'validate_%s' % field.field_name
@@ -107,8 +108,13 @@
     This allows dictionary-style lookups of formfields. It also handles feeding
     prepopulated data and validation error messages to the formfield objects.
     """
-    def __init__(self, manipulator, data, error_dict, edit_inline=True):
-        self.manipulator, self.data = manipulator, data
+    def __init__(self, manipulator, data=None, error_dict=None, edit_inline=True):
+        self.manipulator = manipulator
+        if data is None:
+            data = {}
+        if error_dict is None:
+            error_dict = {}
+        self.data = data
         self.error_dict = error_dict
         self._inline_collections = None
         self.edit_inline = edit_inline
@@ -124,7 +130,9 @@
         if self.edit_inline:
             self.fill_inline_collections()
             for inline_collection in self._inline_collections:
-                if inline_collection.name == key:
+                # The 'orig_name' comparison is for backwards compatibility
+                # with hand-crafted forms.
+                if inline_collection.name == key or (':' not in key and inline_collection.orig_name == key):
                     return inline_collection
         raise KeyError, "Could not find Formfield or InlineObjectCollection named %r" % key
 
@@ -220,6 +228,9 @@
         self.errors = errors
         self._collections = None
         self.name = rel_obj.name
+        # This is the name that was used prior to the fix for #1839. Kept
+        # for backwards compatibility.
+        self.orig_name = rel_obj.opts.module_name
 
     def __len__(self):
         self.fill()
@@ -343,7 +354,7 @@
     def get_validation_errors(self, new_data):
         errors = {}
         if self.is_required and not new_data.get(self.field_name, False):
-            errors.setdefault(self.field_name, []).append(gettext_lazy('This field is required.'))
+            errors.setdefault(self.field_name, []).append(gettext('This field is required.'))
             return errors
         try:
             for validator in self.validator_list:
@@ -434,11 +445,11 @@
             (self.get_id(), self.field_name, escape(data))
 
 class CheckboxField(FormField):
-    def __init__(self, field_name, checked_by_default=False, validator_list=None):
+    def __init__(self, field_name, checked_by_default=False, validator_list=None, is_required=False):
         if validator_list is None: validator_list = []
         self.field_name = field_name
         self.checked_by_default = checked_by_default
-        self.is_required = False # because the validator looks for these
+        self.is_required = is_required
         self.validator_list = validator_list[:]
 
     def render(self, data):
@@ -563,7 +574,7 @@
     "This SelectField provides 'Yes', 'No' and 'Unknown', mapping results to True, False or None"
     def __init__(self, field_name, is_required=False, validator_list=None):
         if validator_list is None: validator_list = []
-        SelectField.__init__(self, field_name, choices=[('1', 'Unknown'), ('2', 'Yes'), ('3', 'No')],
+        SelectField.__init__(self, field_name, choices=[('1', _('Unknown')), ('2', _('Yes')), ('3', _('No'))],
             is_required=is_required, validator_list=validator_list)
 
     def render(self, data):
@@ -638,9 +649,9 @@
             if str(value) in str_data_list:
                 checked_html = ' checked="checked"'
             field_name = '%s%s' % (self.field_name, value)
-            output.append('<li><input type="checkbox" id="%s" class="v%s" name="%s"%s /> <label for="%s">%s</label></li>' % \
-                (self.get_id() + value , self.__class__.__name__, field_name, checked_html,
-                self.get_id() + value, choice))
+            output.append('<li><input type="checkbox" id="%s" class="v%s" name="%s"%s value="on" /> <label for="%s">%s</label></li>' % \
+                (self.get_id() + escape(value), self.__class__.__name__, field_name, checked_html,
+                self.get_id() + escape(value), choice))
         output.append('</ul>')
         return '\n'.join(output)
 
@@ -743,7 +754,7 @@
         if validator_list is None: validator_list = []
         self.max_digits, self.decimal_places = max_digits, decimal_places
         validator_list = [self.isValidFloat] + validator_list
-        TextField.__init__(self, field_name, max_digits+1, max_digits+1, is_required, validator_list)
+        TextField.__init__(self, field_name, max_digits+2, max_digits+2, is_required, validator_list)
 
     def isValidFloat(self, field_data, all_data):
         v = validators.IsValidFloat(self.max_digits, self.decimal_places)
@@ -954,8 +965,7 @@
     def html2python(data):
         if data:
             return data.upper() # Should always be stored in upper case
-        else:
-            return None
+        return data
     html2python = staticmethod(html2python)
 
 class CommaSeparatedIntegerField(TextField):
@@ -972,9 +982,19 @@
         except validators.ValidationError, e:
             raise validators.CriticalValidationError, e.messages
 
+    def render(self, data):
+        if data is None:
+            data = ''
+        elif isinstance(data, (list, tuple)):
+            data = ','.join(data)
+        return super(CommaSeparatedIntegerField, self).render(data)
+
 class RawIdAdminField(CommaSeparatedIntegerField):
     def html2python(data):
-        return data.split(',')
+        if data:
+            return data.split(',')
+        else:
+            return []
     html2python = staticmethod(html2python)
 
 class XMLLargeTextField(LargeTextField):

Modified: cs/pythia/trunk/opal/http/__init__.py
===================================================================
--- cs/pythia/trunk/opal/http/__init__.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/http/__init__.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -38,7 +38,7 @@
 
     def get_full_path(self):
         return ''
-        
+
     def is_secure(self):
         return os.environ.get("HTTPS") == "on"
 
@@ -52,7 +52,7 @@
     POST = MultiValueDict()
     FILES = MultiValueDict()
     for submessage in msg.get_payload():
-        if isinstance(submessage, email.Message.Message):
+        if submessage and isinstance(submessage, email.Message.Message):
             name_dict = parse_header(submessage['Content-Disposition'])[1]
             # name_dict is something like {'name': 'file', 'filename': 'test.txt'} for file uploads
             # or {'name': 'blah'} for POST fields
@@ -160,11 +160,11 @@
         self._charset = settings.DEFAULT_CHARSET
         if not mimetype:
             mimetype = "%s; charset=%s" % (settings.DEFAULT_CONTENT_TYPE, settings.DEFAULT_CHARSET)
-        if hasattr(content, '__iter__'):
-            self._iterator = content
+        if not isinstance(content, basestring) and hasattr(content, '__iter__'):
+            self._container = content
             self._is_string = False
         else:
-            self._iterator = [content]
+            self._container = [content]
             self._is_string = True
         self.headers = {'Content-Type': mimetype}
         self.cookies = SimpleCookie()
@@ -203,39 +203,47 @@
             if val is not None:
                 self.cookies[key][var.replace('_', '-')] = val
 
-    def delete_cookie(self, key):
-        try:
-            self.cookies[key]['max_age'] = 0
-        except KeyError:
-            pass
+    def delete_cookie(self, key, path='/', domain=None):
+        self.cookies[key] = ''
+        if path is not None:
+            self.cookies[key]['path'] = path
+        if domain is not None:
+            self.cookies[key]['domain'] = domain
+        self.cookies[key]['expires'] = 0
+        self.cookies[key]['max-age'] = 0
 
     def _get_content(self):
-        content = ''.join(self._iterator)
+        content = ''.join(self._container)
         if isinstance(content, unicode):
             content = content.encode(self._charset)
         return content
 
     def _set_content(self, value):
-        self._iterator = [value]
+        self._container = [value]
         self._is_string = True
 
     content = property(_get_content, _set_content)
 
-    def _get_iterator(self):
-        "Output iterator. Converts data into client charset if necessary."
-        for chunk in self._iterator:
-            if isinstance(chunk, unicode):
-                chunk = chunk.encode(self._charset)
-            yield chunk
+    def __iter__(self):
+        self._iterator = self._container.__iter__()
+        return self
 
-    iterator = property(_get_iterator)
+    def next(self):
+        chunk = self._iterator.next()
+        if isinstance(chunk, unicode):
+            chunk = chunk.encode(self._charset)
+        return chunk
 
+    def close(self):
+        if hasattr(self._container, 'close'):
+            self._container.close()
+
     # The remaining methods partially implement the file-like object interface.
     # See http://docs.python.org/lib/bltin-file-objects.html
     def write(self, content):
         if not self._is_string:
             raise Exception, "This %s instance is not writable" % self.__class__
-        self._iterator.append(content)
+        self._container.append(content)
 
     def flush(self):
         pass
@@ -243,7 +251,7 @@
     def tell(self):
         if not self._is_string:
             raise Exception, "This %s instance cannot tell its position" % self.__class__
-        return sum([len(chunk) for chunk in self._iterator])
+        return sum([len(chunk) for chunk in self._container])
 
 class HttpResponseRedirect(HttpResponse):
     def __init__(self, redirect_to):

Modified: cs/pythia/trunk/opal/middleware/cache.py
===================================================================
--- cs/pythia/trunk/opal/middleware/cache.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/middleware/cache.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -41,6 +41,9 @@
 
     def process_request(self, request):
         "Checks whether the page is already cached and returns the cached version if available."
+        if self.cache_anonymous_only:
+            assert hasattr(request, 'user'), "The Django cache middleware with CACHE_MIDDLEWARE_ANONYMOUS_ONLY=True requires authentication middleware to be installed. Edit your MIDDLEWARE_CLASSES setting to insert 'opal.contrib.auth.middleware.AuthenticationMiddleware' before the CacheMiddleware."
+
         if not request.method in ('GET', 'HEAD') or request.GET:
             request._cache_update_cache = False
             return None # Don't bother checking the cache.

Modified: cs/pythia/trunk/opal/middleware/common.py
===================================================================
--- cs/pythia/trunk/opal/middleware/common.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/middleware/common.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -2,6 +2,7 @@
 from opal import http
 from opal.core.mail import mail_managers
 import md5
+import re
 
 class CommonMiddleware(object):
     """
@@ -61,11 +62,12 @@
                 # send a note to the managers.
                 domain = http.get_host(request)
                 referer = request.META.get('HTTP_REFERER', None)
-                is_internal = referer and (domain in referer)
+                is_internal = _is_internal_request(domain, referer)
                 path = request.get_full_path()
                 if referer and not _is_ignorable_404(path) and (is_internal or '?' not in referer):
+                    ua = request.META.get('HTTP_USER_AGENT', '<none>')
                     mail_managers("Broken %slink on %s" % ((is_internal and 'INTERNAL ' or ''), domain),
-                        "Referrer: %s\nRequested URL: %s\n" % (referer, request.get_full_path()))
+                        "Referrer: %s\nRequested URL: %s\nUser agent: %s\n" % (referer, request.get_full_path(), ua))
                 return response
 
         # Use ETags, if requested.
@@ -87,3 +89,8 @@
         if uri.endswith(end):
             return True
     return False
+
+def _is_internal_request(domain, referer):
+    "Return true if the referring URL is the same domain as the current request"
+    # Different subdomains are treated as different domains.
+    return referer is not None and re.match("^https?://%s/" % re.escape(domain), referer)

Modified: cs/pythia/trunk/opal/middleware/doc.py
===================================================================
--- cs/pythia/trunk/opal/middleware/doc.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/middleware/doc.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -7,11 +7,12 @@
     """
     def process_view(self, request, view_func, view_args, view_kwargs):
         """
-        If the request method is HEAD and the IP is internal, quickly return
-        with an x-header indicating the view function.  This is used by the
-        documentation module to lookup the view function for an arbitrary page.
+        If the request method is HEAD and either the IP is internal or the
+        user is a logged-in staff member, quickly return with an x-header
+        indicating the view function.  This is used by the documentation module
+        to lookup the view function for an arbitrary page.
         """
-        if request.method == 'HEAD' and request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS:
+        if request.method == 'HEAD' and (request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS or (request.user.is_authenticated() and request.user.is_staff)):
             response = http.HttpResponse()
             response['X-View'] = "%s.%s" % (view_func.__module__, view_func.__name__)
             return response

Modified: cs/pythia/trunk/opal/middleware/gzip.py
===================================================================
--- cs/pythia/trunk/opal/middleware/gzip.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/middleware/gzip.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -12,7 +12,11 @@
     """
     def process_response(self, request, response):
         patch_vary_headers(response, ('Accept-Encoding',))
-        if response.has_header('Content-Encoding'):
+        
+        # Avoid gzipping if we've already got a content-encoding or if the
+        # content-type is Javascript (silly IE...)
+        is_js = "javascript" in response.headers.get('Content-Type', '').lower()
+        if response.has_header('Content-Encoding') or is_js:
             return response
 
         ae = request.META.get('HTTP_ACCEPT_ENCODING', '')
@@ -21,4 +25,5 @@
 
         response.content = compress_string(response.content)
         response['Content-Encoding'] = 'gzip'
+        response['Content-Length'] = str(len(response.content))
         return response

Modified: cs/pythia/trunk/opal/middleware/http.py
===================================================================
--- cs/pythia/trunk/opal/middleware/http.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/middleware/http.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -35,3 +35,27 @@
             response.content = ''
 
         return response
+
+class SetRemoteAddrFromForwardedFor(object):
+    """
+    Middleware that sets REMOTE_ADDR based on HTTP_X_FORWARDED_FOR, if the
+    latter is set. This is useful if you're sitting behind a reverse proxy that
+    causes each request's REMOTE_ADDR to be set to 127.0.0.1.
+
+    Note that this does NOT validate HTTP_X_FORWARDED_FOR. If you're not behind
+    a reverse proxy that sets HTTP_X_FORWARDED_FOR automatically, do not use
+    this middleware. Anybody can spoof the value of HTTP_X_FORWARDED_FOR, and
+    because this sets REMOTE_ADDR based on HTTP_X_FORWARDED_FOR, that means
+    anybody can "fake" their IP address. Only use this when you can absolutely
+    trust the value of HTTP_X_FORWARDED_FOR.
+    """
+    def process_request(self, request):
+        try:
+            real_ip = request.META['HTTP_X_FORWARDED_FOR']
+        except KeyError:
+            return None
+        else:
+            # HTTP_X_FORWARDED_FOR can be a comma-separated list of IPs.
+            # Take just the first one.
+            real_ip = real_ip.split(",")[0]
+            request.META['REMOTE_ADDR'] = real_ip

Modified: cs/pythia/trunk/opal/scripts/compile-messages.py
===================================================================
--- cs/pythia/trunk/opal/scripts/compile-messages.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/scripts/compile-messages.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -1,9 +1,10 @@
 #!/usr/bin/env python
 
+import optparse
 import os
 import sys
 
-def compile_messages():
+def compile_messages(locale=None):
     basedir = None
 
     if os.path.isdir(os.path.join('conf', 'locale')):
@@ -11,10 +12,13 @@
     elif os.path.isdir('locale'):
         basedir = os.path.abspath('locale')
     else:
-        print "this script should be run from the django svn tree or your project or app tree"
+        print "This script should be run from the Django SVN tree or your project or app tree."
         sys.exit(1)
 
-    for (dirpath, dirnames, filenames) in os.walk(basedir):
+    if locale is not None:
+        basedir = os.path.join(basedir, locale, 'LC_MESSAGES')
+
+    for dirpath, dirnames, filenames in os.walk(basedir):
         for f in filenames:
             if f.endswith('.po'):
                 sys.stderr.write('processing file %s in %s\n' % (f, dirpath))
@@ -29,8 +33,17 @@
                 if sys.platform == 'win32': # Different shell-variable syntax
                     cmd = 'msgfmt -o "%djangocompilemo%" "%djangocompilepo%"'
                 else:
-                    cmd = 'msgfmt -o "$djangocompilemo" "$djangocompilepo"' 
+                    cmd = 'msgfmt -o "$djangocompilemo" "$djangocompilepo"'
                 os.system(cmd)
 
+def main():
+    parser = optparse.OptionParser()
+    parser.add_option('-l', '--locale', dest='locale',
+            help="The locale to process. Default is to process all.")
+    options, args = parser.parse_args()
+    if len(args):
+        parser.error("This program takes no arguments")
+    compile_messages(options.locale)
+
 if __name__ == "__main__":
-    compile_messages()
+    main()

Modified: cs/pythia/trunk/opal/scripts/daily_cleanup.py
===================================================================
--- cs/pythia/trunk/opal/scripts/daily_cleanup.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/scripts/daily_cleanup.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -1,16 +1,19 @@
-"Daily cleanup file"
+#!/usr/bin/env python
 
+"""
+Daily cleanup job.
+
+Can be run as a cronjob to clean out old data from the database (only expired
+sessions at the moment).
+"""
+
 from opal.db import backend, connection, transaction
 
-DOCUMENTATION_DIRECTORY = '/home/html/documentation/'
-
 def clean_up():
     # Clean up old database records
     cursor = connection.cursor()
     cursor.execute("DELETE FROM %s WHERE %s < NOW()" % \
-        (backend.quote_name('core_sessions'), backend.quote_name('expire_date')))
-    cursor.execute("DELETE FROM %s WHERE %s < NOW() - INTERVAL '1 week'" % \
-        (backend.quote_name('registration_challenges'), backend.quote_name('request_date')))
+        (backend.quote_name('opal_session'), backend.quote_name('expire_date')))
     transaction.commit_unless_managed()
 
 if __name__ == "__main__":

Modified: cs/pythia/trunk/opal/scripts/make-messages.py
===================================================================
--- cs/pythia/trunk/opal/scripts/make-messages.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/scripts/make-messages.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -81,7 +81,7 @@
                     src = pythonize_re.sub('\n#', src)
                     open(os.path.join(dirpath, '%s.py' % file), "wb").write(src)
                     thefile = '%s.py' % file
-                    cmd = 'xgettext %s -d %s -L Perl --keyword=gettext_noop --keyword=gettext_lazy --keyword=ngettext_lazy -o - "%s"' % (
+                    cmd = 'xgettext %s -d %s -L Perl --keyword=gettext_noop --keyword=gettext_lazy --keyword=ngettext_lazy --from-code UTF-8 -o - "%s"' % (
                         os.path.exists(potfile) and '--omit-header' or '', domain, os.path.join(dirpath, thefile))
                     (stdin, stdout, stderr) = os.popen3(cmd, 'b')
                     msgs = stdout.read()
@@ -103,7 +103,7 @@
                         open(os.path.join(dirpath, '%s.py' % file), "wb").write(templatize(src))
                         thefile = '%s.py' % file
                     if verbose: sys.stdout.write('processing file %s in %s\n' % (file, dirpath))
-                    cmd = 'xgettext %s -d %s -L Python --keyword=gettext_noop --keyword=gettext_lazy --keyword=ngettext_lazy -o - "%s"' % (
+                    cmd = 'xgettext %s -d %s -L Python --keyword=gettext_noop --keyword=gettext_lazy --keyword=ngettext_lazy --from-code UTF-8 -o - "%s"' % (
                         os.path.exists(potfile) and '--omit-header' or '', domain, os.path.join(dirpath, thefile))
                     (stdin, stdout, stderr) = os.popen3(cmd, 'b')
                     msgs = stdout.read()

Modified: cs/pythia/trunk/opal/shortcuts/__init__.py
===================================================================
--- cs/pythia/trunk/opal/shortcuts/__init__.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/shortcuts/__init__.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -4,20 +4,29 @@
 
 from opal.template import loader
 from opal.http import HttpResponse, Http404
+from opal.db.models.manager import Manager
 
-
 def render_to_response(*args, **kwargs):
     return HttpResponse(loader.render_to_string(*args, **kwargs))
 load_and_render = render_to_response # For backwards compatibility.
 
 def get_object_or_404(klass, *args, **kwargs):
+    if isinstance(klass, Manager):
+        manager = klass
+        klass = manager.model
+    else:
+        manager = klass._default_manager
     try:
-        return klass._default_manager.get(*args, **kwargs)
+        return manager.get(*args, **kwargs)
     except klass.DoesNotExist:
-        raise Http404
+        raise Http404('No %s matches the given query.' % klass._meta.object_name)
 
 def get_list_or_404(klass, *args, **kwargs):
-    obj_list = list(klass._default_manager.filter(*args, **kwargs))
+    if isinstance(klass, Manager):
+        manager = klass
+    else:
+        manager = klass._default_manager
+    obj_list = list(manager.filter(*args, **kwargs))
     if not obj_list:
-        raise Http404
+        raise Http404('No %s matches the given query.' % manager.model._meta.object_name)
     return obj_list

Modified: cs/pythia/trunk/opal/sites/WebSite.py
===================================================================
--- cs/pythia/trunk/opal/sites/WebSite.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/sites/WebSite.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -55,6 +55,7 @@
     LANGUAGES = (
         ('ar', gettext_noop('Arabic')),
         ('bn', gettext_noop('Bengali')),
+        ('ca', gettext_noop('Catalan')),
         ('cs', gettext_noop('Czech')),
         ('cy', gettext_noop('Welsh')),
         ('da', gettext_noop('Danish')),
@@ -63,6 +64,7 @@
         ('en', gettext_noop('English')),
         ('es', gettext_noop('Spanish')),
         ('es_AR', gettext_noop('Argentinean Spanish')),
+        ('fi', gettext_noop('Finnish')),
         ('fr', gettext_noop('French')),
         ('gl', gettext_noop('Galician')),
         ('hu', gettext_noop('Hungarian')),
@@ -70,8 +72,13 @@
         ('is', gettext_noop('Icelandic')),
         ('it', gettext_noop('Italian')),
         ('ja', gettext_noop('Japanese')),
+        ('kn', gettext_noop('Kannada')),
+        ('lv', gettext_noop('Latvian')),
+        ('mk', gettext_noop('Macedonian')),
         ('nl', gettext_noop('Dutch')),
         ('no', gettext_noop('Norwegian')),
+        ('pl', gettext_noop('Polish')),
+        ('pt', gettext_noop('Portugese')),
         ('pt-br', gettext_noop('Brazilian')),
         ('ro', gettext_noop('Romanian')),
         ('ru', gettext_noop('Russian')),
@@ -80,6 +87,8 @@
         ('sr', gettext_noop('Serbian')),
         ('sv', gettext_noop('Swedish')),
         ('ta', gettext_noop('Tamil')),
+        ('te', gettext_noop('Telugu')),
+        ('tr', gettext_noop('Turkish')),
         ('uk', gettext_noop('Ukrainian')),
         ('zh-cn', gettext_noop('Simplified Chinese')),
         ('zh-tw', gettext_noop('Traditional Chinese')),
@@ -109,12 +118,13 @@
     # Database connection info.
     ### Perhaps this should be a facility.
     DATABASE_ENGINE    = pyre.str("database-engine",
-                                  validator=pyre.choice(['postgresql', 'mysql', 'sqlite3', 'ado_mssql']))
+                                  validator=pyre.choice(['postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3', 'ado_mssql']))
     DATABASE_NAME      = pyre.str("database-name")        # Or path to database file if using sqlite3.
     DATABASE_USER      = pyre.str("database-user")        # Not used with sqlite3.
     DATABASE_PASSWORD  = pyre.str("database-password")    # Not used with sqlite3.
     DATABASE_HOST      = pyre.str("database-host")        # Set to empty string for localhost. Not used with sqlite3.
     DATABASE_PORT      = pyre.str("database-port")        # Set to empty string for default. Not used with sqlite3.
+    DATABASE_OPTIONS   = {}                               # Set to empty dictionary for default.
 
     EMAIL_HOST = pyre.str("email-host", default="localhost")
     EMAIL_HOST.meta['tip'] = """Host for sending e-mail."""
@@ -224,11 +234,11 @@
     MONTH_DAY_FORMAT = pyre.str("month-day-format", default="F j")
     MONTH_DAY_FORMAT.meta['tip'] = """Default formatting for date objects when only the month and day are relevant. See all available format strings here: http://www.djangoproject.com/documentation/templates/#now"""
 
-    ENABLE_PSYCO = pyre.bool("enable-psyco", default=False)
-    ENABLE_PSYCO.meta['tip'] = """Whether to enable Psyco, which optimizes Python code. Requires Psyco. http://psyco.sourceforge.net/"""
-
     TRANSACTIONS_MANAGED = pyre.bool("transactions-managed", default=False)
     TRANSACTIONS_MANAGED.meta['tip'] = """Do you want to manage transactions manually? Hint: you really don't!"""
+    
+    URL_VALIDATOR_USER_AGENT = pyre.str("url-validator-user-agent", default="Django/0.96pre (http://www.djangoproject.com)")
+    URL_VALIDATOR_USER_AGENT.meta['tip'] = """The User-Agent string to use when checking for URL validity through the isExistingURL validator."""
 
 
     ##############
@@ -259,6 +269,9 @@
     
     session_cookie_domain = pyre.str("session-cookie-domain")
     session_cookie_domain.meta['tip'] = """A string like ".lawrence.com". Leave blank for standard domain cookie."""
+
+    SESSION_COOKIE_SECURE = pyre.bool("session-cookie-secure", default=False)
+    SESSION_COOKIE_SECURE.meta['tip'] = """Whether the session cookie should be secure (https:// only)."""
     
     SESSION_SAVE_EVERY_REQUEST = pyre.bool("session-save-every-request", default=False)
     SESSION_SAVE_EVERY_REQUEST.meta['tip'] = """Whether to save the session data on every request."""
@@ -283,6 +296,9 @@
 
     COMMENTS_ALLOW_PROFANITIES = pyre.bool("comments-allow-profanities", default=False)
 
+    PROFANITIES_LIST = pyre.list("profanities-list", default=['asshat', 'asshead', 'asshole', 'cunt', 'fuck', 'gook', 'nigger', 'shit'])
+    PROFANITIES_LIST.meta['tip'] = """The profanities that will trigger a validation error in the 'hasNoProfanities' validator. All of these should be in lowercase."""
+
     # The group ID that designates which users are banned.
     # Set to None if you're not using it.
     COMMENTS_BANNED_USERS_GROUP = None
@@ -311,6 +327,25 @@
         ])
 
 
+    ###########
+    # TESTING #
+    ###########
+
+    TEST_RUNNER = pyre.str("test-runner", default='opal.test.simple.run_tests')
+    TEST_RUNNER.meta['tip'] = """The name of the method to use to invoke the test suite"""
+
+    TEST_DATABASE_NAME = pyre.str("test-database-name")
+    TEST_DATABASE_NAME.meta['tip'] = """The name of the database to use for testing purposes. If empty, a name of 'test_' + DATABASE_NAME will be assumed"""
+
+
+    ############
+    # FIXTURES #
+    ############
+
+    FIXTURE_DIRS = pyre.list("fixture-dirs")
+    FIXTURE_DIRS.meta['tip'] = """The list of directories to search for fixtures"""
+
+
     #########
     # ????? #
     #########

Modified: cs/pythia/trunk/opal/template/__init__.py
===================================================================
--- cs/pythia/trunk/opal/template/__init__.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/template/__init__.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -66,6 +66,7 @@
 TOKEN_TEXT = 0
 TOKEN_VAR = 1
 TOKEN_BLOCK = 2
+TOKEN_COMMENT = 3
 
 # template syntax constants
 FILTER_SEPARATOR = '|'
@@ -75,6 +76,8 @@
 BLOCK_TAG_END = '%}'
 VARIABLE_TAG_START = '{{'
 VARIABLE_TAG_END = '}}'
+COMMENT_TAG_START = '{#'
+COMMENT_TAG_END = '#}'
 SINGLE_BRACE_START = '{'
 SINGLE_BRACE_END = '}'
 
@@ -85,8 +88,11 @@
 UNKNOWN_SOURCE="&lt;unknown source&gt;"
 
 # match a variable or block tag and capture the entire tag, including start/end delimiters
-tag_re = re.compile('(%s.*?%s|%s.*?%s)' % (re.escape(BLOCK_TAG_START), re.escape(BLOCK_TAG_END),
-                                          re.escape(VARIABLE_TAG_START), re.escape(VARIABLE_TAG_END)))
+tag_re = re.compile('(%s.*?%s|%s.*?%s|%s.*?%s)' % (re.escape(BLOCK_TAG_START), re.escape(BLOCK_TAG_END),
+                                          re.escape(VARIABLE_TAG_START), re.escape(VARIABLE_TAG_END),
+                                          re.escape(COMMENT_TAG_START), re.escape(COMMENT_TAG_END)))
+# matches if the string is a valid number
+number_re = re.compile(r'[-+]?(\d+|\d*\.\d+)$')
 
 # global dictionary of libraries that have been loaded using get_library
 libraries = {}
@@ -113,8 +119,14 @@
     pass
 
 class VariableDoesNotExist(Exception):
-    pass
 
+    def __init__(self, msg, params=()):
+        self.msg = msg
+        self.params = params
+    
+    def __str__(self):
+        return self.msg % self.params
+    
 class InvalidTemplateLibrary(Exception):
     pass
 
@@ -137,13 +149,14 @@
         return self.source
 
 class Template(object):
-    def __init__(self, template_string, origin=None):
+    def __init__(self, template_string, origin=None, name='<Unknown Template>'):
         "Compilation stage"
         if settings.TEMPLATE_DEBUG and origin == None:
             origin = StringOrigin(template_string)
             # Could do some crazy stack-frame stuff to record where this string
             # came from...
         self.nodelist = compile_string(template_string, origin)
+        self.name = name
 
     def __iter__(self):
         for node in self.nodelist:
@@ -162,12 +175,12 @@
 
 class Token(object):
     def __init__(self, token_type, contents):
-        "The token_type must be TOKEN_TEXT, TOKEN_VAR or TOKEN_BLOCK"
+        "The token_type must be TOKEN_TEXT, TOKEN_VAR, TOKEN_BLOCK or TOKEN_COMMENT"
         self.token_type, self.contents = token_type, contents
 
     def __str__(self):
         return '<%s token: "%s...">' % \
-            ({TOKEN_TEXT: 'Text', TOKEN_VAR: 'Var', TOKEN_BLOCK: 'Block'}[self.token_type],
+            ({TOKEN_TEXT: 'Text', TOKEN_VAR: 'Var', TOKEN_BLOCK: 'Block', TOKEN_COMMENT: 'Comment'}[self.token_type],
             self.contents[:20].replace('\n', ''))
 
     def split_contents(self):
@@ -190,6 +203,8 @@
             token = Token(TOKEN_VAR, token_string[len(VARIABLE_TAG_START):-len(VARIABLE_TAG_END)].strip())
         elif token_string.startswith(BLOCK_TAG_START):
             token = Token(TOKEN_BLOCK, token_string[len(BLOCK_TAG_START):-len(BLOCK_TAG_END)].strip())
+        elif token_string.startswith(COMMENT_TAG_START):
+            token = Token(TOKEN_COMMENT, '')
         else:
             token = Token(TOKEN_TEXT, token_string)
         return token
@@ -434,7 +449,7 @@
             while i < len(subject) and subject[i] != subject[p]:
                 i += 1
             if i >= len(subject):
-                raise TemplateSyntaxError, "Searching for value. Unexpected end of string in column %d: %s" % subject
+                raise TemplateSyntaxError, "Searching for value. Unexpected end of string in column %d: %s" % (i, subject)
             i += 1
             res = subject[p:i]
             while i < len(subject) and subject[i] in (' ', '\t'):
@@ -531,7 +546,7 @@
                 constant_arg, i18n_arg, var_arg = match.group("constant_arg", "i18n_arg", "var_arg")
                 if i18n_arg:
                     args.append((False, _(i18n_arg.replace(r'\"', '"'))))
-                elif constant_arg:
+                elif constant_arg is not None:
                     args.append((False, constant_arg.replace(r'\"', '"')))
                 elif var_arg:
                     args.append((True, var_arg))
@@ -548,9 +563,12 @@
             obj = resolve_variable(self.var, context)
         except VariableDoesNotExist:
             if ignore_failures:
-                return None
+                obj = None
             else:
-                return settings.TEMPLATE_STRING_IF_INVALID
+                if settings.TEMPLATE_STRING_IF_INVALID:
+                    return settings.TEMPLATE_STRING_IF_INVALID
+                else:
+                    obj = settings.TEMPLATE_STRING_IF_INVALID
         for func, args in self.filters:
             arg_vals = []
             for lookup, arg in args:
@@ -564,6 +582,8 @@
     def args_check(name, func, provided):
         provided = list(provided)
         plen = len(provided)
+        # Check to see if a decorator is providing the real function.
+        func = getattr(func, '_decorated_function', func)
         args, varargs, varkw, defaults = getargspec(func)
         # First argument is filter input.
         args.pop(0)
@@ -614,16 +634,9 @@
 
     (The example assumes VARIABLE_ATTRIBUTE_SEPARATOR is '.')
     """
-    if path == 'False':
-        current = False
-    elif path == 'True':
-        current = True
-    elif path[0].isdigit():
+    if number_re.match(path):
         number_type = '.' in path and float or int
-        try:
-            current = number_type(path)
-        except ValueError:
-            current = settings.TEMPLATE_STRING_IF_INVALID
+        current = number_type(path)
     elif path[0] in ('"', "'") and path[0] == path[-1]:
         current = path[1:-1]
     else:
@@ -653,8 +666,12 @@
                 except (TypeError, AttributeError):
                     try: # list-index lookup
                         current = current[int(bits[0])]
-                    except (IndexError, ValueError, KeyError):
-                        raise VariableDoesNotExist, "Failed lookup for key [%s] in %r" % (bits[0], current) # missing attribute
+                    except (IndexError, # list index out of range
+                            ValueError, # invalid literal for int()
+                            KeyError,   # current is a dict without `int(bits[0])` key
+                            TypeError,  # unsubscriptable object
+                            ):
+                        raise VariableDoesNotExist("Failed lookup for key [%s] in %r", (bits[0], current)) # missing attribute
                 except Exception, e:
                     if getattr(e, 'silent_variable_failure', False):
                         current = settings.TEMPLATE_STRING_IF_INVALID
@@ -736,7 +753,11 @@
     def encode_output(self, output):
         # Check type so that we don't run str() on a Unicode object
         if not isinstance(output, basestring):
-            return str(output)
+            try:
+                return str(output)
+            except UnicodeEncodeError:
+                # If __str__() returns a Unicode object, convert it to bytestring.
+                return unicode(output).encode(settings.DEFAULT_CHARSET)
         elif isinstance(output, unicode):
             return output.encode(settings.DEFAULT_CHARSET)
         else:
@@ -796,7 +817,7 @@
             raise InvalidTemplateLibrary, "Unsupported arguments to Library.tag: (%r, %r)", (name, compile_function)
 
     def tag_function(self,func):
-        self.tags[func.__name__] = func
+        self.tags[getattr(func, "_decorated_function", func).__name__] = func
         return func
 
     def filter(self, name=None, filter_func=None):
@@ -820,7 +841,7 @@
             raise InvalidTemplateLibrary, "Unsupported arguments to Library.filter: (%r, %r)", (name, filter_func)
 
     def filter_function(self, func):
-        self.filters[func.__name__] = func
+        self.filters[getattr(func, "_decorated_function", func).__name__] = func
         return func
 
     def simple_tag(self,func):
@@ -834,9 +855,9 @@
                 resolved_vars = [resolve_variable(var, context) for var in self.vars_to_resolve]
                 return func(*resolved_vars)
 
-        compile_func = curry(generic_tag_compiler, params, defaults, func.__name__, SimpleNode)
+        compile_func = curry(generic_tag_compiler, params, defaults, getattr(func, "_decorated_function", func).__name__, SimpleNode)
         compile_func.__doc__ = func.__doc__
-        self.tag(func.__name__, compile_func)
+        self.tag(getattr(func, "_decorated_function", func).__name__, compile_func)
         return func
 
     def inclusion_tag(self, file_name, context_class=Context, takes_context=False):
@@ -862,14 +883,17 @@
                     dict = func(*args)
 
                     if not getattr(self, 'nodelist', False):
-                        from opal.template.loader import get_template
-                        t = get_template(file_name)
+                        from opal.template.loader import get_template, select_template
+                        if hasattr(file_name, '__iter__'):
+                            t = select_template(file_name)
+                        else:
+                            t = get_template(file_name)
                         self.nodelist = t.nodelist
                     return self.nodelist.render(context_class(dict))
 
-            compile_func = curry(generic_tag_compiler, params, defaults, func.__name__, InclusionNode)
+            compile_func = curry(generic_tag_compiler, params, defaults, getattr(func, "_decorated_function", func).__name__, InclusionNode)
             compile_func.__doc__ = func.__doc__
-            self.tag(func.__name__, compile_func)
+            self.tag(getattr(func, "_decorated_function", func).__name__, compile_func)
             return func
         return dec
 
@@ -877,7 +901,7 @@
     lib = libraries.get(module_name, None)
     if not lib:
         try:
-            mod = __import__(module_name, '', '', [''])
+            mod = __import__(module_name, {}, {}, [''])
         except ImportError, e:
             raise InvalidTemplateLibrary, "Could not load template library from %s, %s" % (module_name, e)
         try:

Modified: cs/pythia/trunk/opal/template/context.py
===================================================================
--- cs/pythia/trunk/opal/template/context.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/template/context.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -49,6 +49,9 @@
                 return True
         return False
 
+    def __contains__(self, key):
+        return self.has_key(key)
+
     def get(self, key, otherwise=None):
         for d in self.dicts:
             if d.has_key(key):
@@ -69,7 +72,7 @@
             i = path.rfind('.')
             module, attr = path[:i], path[i+1:]
             try:
-                mod = __import__(module, '', '', [attr])
+                mod = __import__(module, {}, {}, [attr])
             except ImportError, e:
                 raise ImproperlyConfigured, 'Error importing request processor module %s: "%s"' % (module, e)
             try:

Modified: cs/pythia/trunk/opal/template/defaultfilters.py
===================================================================
--- cs/pythia/trunk/opal/template/defaultfilters.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/template/defaultfilters.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -8,6 +8,38 @@
 
 register = Library()
 
+#######################
+# STRING DECORATOR    #
+#######################
+
+def smart_string(obj):
+    # FUTURE: Unicode strings should probably be normalized to a specific
+    # encoding and non-unicode strings should be converted to unicode too.
+#    if isinstance(obj, unicode):
+#        obj = obj.encode(settings.DEFAULT_CHARSET)
+#    else:
+#        obj = unicode(obj, settings.DEFAULT_CHARSET)
+    # FUTURE: Replace dumb string logic below with cool unicode logic above.
+    if not isinstance(obj, basestring):
+        obj = str(obj)
+    return obj
+
+def stringfilter(func):
+    """
+    Decorator for filters which should only receive strings. The object passed
+    as the first positional argument will be converted to a string.
+    """
+    def _dec(*args, **kwargs):
+        if args:
+            args = list(args)
+            args[0] = smart_string(args[0])
+        return func(*args, **kwargs)
+        
+    # Include a reference to the real function (used to check original
+    # arguments by the template parser).
+    _dec._decorated_function = getattr(func, '_decorated_function', func)
+    return _dec
+
 ###################
 # STRINGS         #
 ###################
@@ -15,32 +47,53 @@
 
 def addslashes(value):
     "Adds slashes - useful for passing strings to JavaScript, for example."
-    return value.replace('"', '\\"').replace("'", "\\'")
+    return value.replace('\\', '\\\\').replace('"', '\\"').replace("'", "\\'")
+addslashes = stringfilter(addslashes)
 
 def capfirst(value):
     "Capitalizes the first character of the value"
-    value = str(value)
     return value and value[0].upper() + value[1:]
-
+capfirst = stringfilter(capfirst)
+ 
 def fix_ampersands(value):
     "Replaces ampersands with ``&amp;`` entities"
     from opal.utils.html import fix_ampersands
     return fix_ampersands(value)
+fix_ampersands = stringfilter(fix_ampersands)
 
-def floatformat(text):
+def floatformat(text, arg=-1):
     """
-    Displays a floating point number as 34.2 (with one decimal place) -- but
-    only if there's a point to be displayed
+    If called without an argument, displays a floating point
+    number as 34.2 -- but only if there's a point to be displayed.
+    With a positive numeric argument, it displays that many decimal places
+    always.
+    With a negative numeric argument, it will display that many decimal
+    places -- but only if there are places to be displayed.
+    Examples:
+
+    * num1 = 34.23234
+    * num2 = 34.00000
+    * num1|floatformat results in 34.2
+    * num2|floatformat is 34
+    * num1|floatformat:3 is 34.232
+    * num2|floatformat:3 is 34.000
+    * num1|floatformat:-3 is 34.232
+    * num2|floatformat:-3 is 34
     """
     try:
         f = float(text)
     except ValueError:
         return ''
+    try:
+        d = int(arg)
+    except ValueError:
+        return smart_string(f)
     m = f - int(f)
-    if m:
-        return '%.1f' % f
+    if not m and d < 0:
+        return '%d' % int(f)
     else:
-        return '%d' % int(f)
+        formatstr = '%%.%df' % abs(d)
+        return formatstr % f
 
 def linenumbers(value):
     "Displays text with line numbers"
@@ -51,22 +104,26 @@
     for i, line in enumerate(lines):
         lines[i] = ("%0" + width  + "d. %s") % (i + 1, escape(line))
     return '\n'.join(lines)
+linenumbers = stringfilter(linenumbers)
 
 def lower(value):
     "Converts a string into all lowercase"
     return value.lower()
+lower = stringfilter(lower)
 
 def make_list(value):
     """
     Returns the value turned into a list. For an integer, it's a list of
     digits. For a string, it's a list of characters.
     """
-    return list(str(value))
+    return list(value)
+make_list = stringfilter(make_list)
 
 def slugify(value):
     "Converts to lowercase, removes non-alpha chars and converts spaces to hyphens"
     value = re.sub('[^\w\s-]', '', value).strip().lower()
     return re.sub('[-\s]+', '-', value)
+slugify = stringfilter(slugify)
 
 def stringformat(value, arg):
     """
@@ -78,13 +135,14 @@
     of Python string formatting
     """
     try:
-        return ("%" + arg) % value
+        return ("%" + str(arg)) % value
     except (ValueError, TypeError):
         return ""
 
 def title(value):
     "Converts a string into titlecase"
     return re.sub("([a-z])'([A-Z])", lambda m: m.group(0).lower(), value.title())
+title = stringfilter(title)
 
 def truncatewords(value, arg):
     """
@@ -100,20 +158,42 @@
     if not isinstance(value, basestring):
         value = str(value)
     return truncate_words(value, length)
+truncatewords = stringfilter(truncatewords)
 
+def truncatewords_html(value, arg):
+    """
+    Truncates HTML after a certain number of words
+
+    Argument: Number of words to truncate after
+    """
+    from opal.utils.text import truncate_html_words
+    try:
+        length = int(arg)
+    except ValueError: # invalid literal for int()
+        return value # Fail silently.
+    if not isinstance(value, basestring):
+        value = str(value)
+    return truncate_html_words(value, length)
+truncatewords_html = stringfilter(truncatewords_html)
+
 def upper(value):
     "Converts a string into all uppercase"
     return value.upper()
+upper = stringfilter(upper)
 
 def urlencode(value):
     "Escapes a value for use in a URL"
     import urllib
+    if not isinstance(value, basestring):
+        value = str(value)
     return urllib.quote(value)
+urlencode = stringfilter(urlencode)
 
 def urlize(value):
     "Converts URLs in plain text into clickable links"
     from opal.utils.html import urlize
     return urlize(value, nofollow=True)
+urlize = stringfilter(urlize)
 
 def urlizetrunc(value, limit):
     """
@@ -124,10 +204,12 @@
     """
     from opal.utils.html import urlize
     return urlize(value, trim_url_limit=int(limit), nofollow=True)
+urlizetrunc = stringfilter(urlizetrunc)
 
 def wordcount(value):
     "Returns the number of words"
     return len(value.split())
+wordcount = stringfilter(wordcount)
 
 def wordwrap(value, arg):
     """
@@ -136,7 +218,8 @@
     Argument: number of characters to wrap the text at.
     """
     from opal.utils.text import wrap
-    return wrap(str(value), int(arg))
+    return wrap(value, int(arg))
+wordwrap = stringfilter(wordwrap)
 
 def ljust(value, arg):
     """
@@ -144,7 +227,8 @@
 
     Argument: field size
     """
-    return str(value).ljust(int(arg))
+    return value.ljust(int(arg))
+ljust = stringfilter(ljust)
 
 def rjust(value, arg):
     """
@@ -152,15 +236,18 @@
 
     Argument: field size
     """
-    return str(value).rjust(int(arg))
+    return value.rjust(int(arg))
+rjust = stringfilter(rjust)
 
 def center(value, arg):
     "Centers the value in a field of a given width"
-    return str(value).center(int(arg))
+    return value.center(int(arg))
+center = stringfilter(center)
 
 def cut(value, arg):
     "Removes all values of arg from the given string"
     return value.replace(arg, '')
+cut = stringfilter(cut)
 
 ###################
 # HTML STRINGS    #
@@ -170,15 +257,18 @@
     "Escapes a string's HTML"
     from opal.utils.html import escape
     return escape(value)
+escape = stringfilter(escape)
 
 def linebreaks(value):
     "Converts newlines into <p> and <br />s"
     from opal.utils.html import linebreaks
     return linebreaks(value)
+linebreaks = stringfilter(linebreaks)
 
 def linebreaksbr(value):
     "Converts newlines into <br />s"
     return value.replace('\n', '<br />')
+linebreaksbr = stringfilter(linebreaksbr)
 
 def removetags(value, tags):
     "Removes a space separated list of [X]HTML tags from the output"
@@ -189,13 +279,13 @@
     value = starttag_re.sub('', value)
     value = endtag_re.sub('', value)
     return value
+removetags = stringfilter(removetags)
 
 def striptags(value):
     "Strips all [X]HTML tags"
     from opal.utils.html import strip_tags
-    if not isinstance(value, basestring):
-        value = str(value)
     return strip_tags(value)
+striptags = stringfilter(striptags)
 
 ###################
 # LISTS           #
@@ -230,7 +320,7 @@
 def join(value, arg):
     "Joins a list with a string, like Python's ``str.join(list)``"
     try:
-        return arg.join(map(str, value))
+        return arg.join(map(smart_string, value))
     except AttributeError: # fail silently but nicely
         return value
 
@@ -339,7 +429,7 @@
 def time(value, arg=None):
     "Formats a time according to the given format"
     from opal.utils.dateformat import time_format
-    if not value:
+    if value in (None, ''):
         return ''
     if arg is None:
         arg = settings.TIME_FORMAT
@@ -421,7 +511,11 @@
     Format the value like a 'human-readable' file size (i.e. 13 KB, 4.1 MB, 102
     bytes, etc).
     """
-    bytes = float(bytes)
+    try:
+        bytes = float(bytes)
+    except TypeError:
+        return "0 bytes"
+        
     if bytes < 1024:
         return "%d byte%s" % (bytes, bytes != 1 and 's' or '')
     if bytes < 1024 * 1024:
@@ -437,7 +531,7 @@
     is used instead. If the provided argument contains a comma, the text before
     the comma is used for the singular case.
     """
-    if not ',' in arg: 
+    if not ',' in arg:
         arg = ',' + arg
     bits = arg.split(',')
     if len(bits) > 2:
@@ -512,6 +606,7 @@
 register.filter(timeuntil)
 register.filter(title)
 register.filter(truncatewords)
+register.filter(truncatewords_html)
 register.filter(unordered_list)
 register.filter(upper)
 register.filter(urlencode)

Modified: cs/pythia/trunk/opal/template/defaulttags.py
===================================================================
--- cs/pythia/trunk/opal/template/defaulttags.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/template/defaulttags.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -1,7 +1,7 @@
 "Default tags used by the template system, available to all templates."
 
 from opal.template import Node, NodeList, Template, Context, resolve_variable
-from opal.template import TemplateSyntaxError, VariableDoesNotExist, BLOCK_TAG_START, BLOCK_TAG_END, VARIABLE_TAG_START, VARIABLE_TAG_END, SINGLE_BRACE_START, SINGLE_BRACE_END
+from opal.template import TemplateSyntaxError, VariableDoesNotExist, BLOCK_TAG_START, BLOCK_TAG_END, VARIABLE_TAG_START, VARIABLE_TAG_END, SINGLE_BRACE_START, SINGLE_BRACE_END, COMMENT_TAG_START, COMMENT_TAG_END
 from opal.template import get_library, Library, InvalidTemplateLibrary
 from opal.conf import settings
 import sys
@@ -13,14 +13,18 @@
         return ''
 
 class CycleNode(Node):
-    def __init__(self, cyclevars):
+    def __init__(self, cyclevars, variable_name=None):
         self.cyclevars = cyclevars
         self.cyclevars_len = len(cyclevars)
         self.counter = -1
+        self.variable_name = variable_name
 
     def render(self, context):
         self.counter += 1
-        return self.cyclevars[self.counter % self.cyclevars_len]
+        value = self.cyclevars[self.counter % self.cyclevars_len]
+        if self.variable_name:
+            context[self.variable_name] = value
+        return value
 
 class DebugNode(Node):
     def render(self, context):
@@ -86,7 +90,7 @@
             parentloop = {}
         context.push()
         try:
-            values = self.sequence.resolve(context)
+            values = self.sequence.resolve(context, True)
         except VariableDoesNotExist:
             values = []
         if values is None:
@@ -120,15 +124,27 @@
         return nodelist.render(context)
 
 class IfChangedNode(Node):
-    def __init__(self, nodelist):
+    def __init__(self, nodelist, *varlist):
         self.nodelist = nodelist
         self._last_seen = None
+        self._varlist = varlist
 
     def render(self, context):
-        content = self.nodelist.render(context)
-        if content != self._last_seen:
+        if context.has_key('forloop') and context['forloop']['first']:
+            self._last_seen = None
+        try:
+            if self._varlist:
+                # Consider multiple parameters.
+                # This automatically behaves like an OR evaluation of the multiple variables.
+                compare_to = [resolve_variable(var, context) for var in self._varlist]
+            else:
+                compare_to = self.nodelist.render(context)
+        except VariableDoesNotExist:
+            compare_to = None        
+
+        if  compare_to != self._last_seen:
             firstloop = (self._last_seen == None)
-            self._last_seen = content
+            self._last_seen = compare_to
             context.push()
             context['ifchanged'] = {'firstloop': firstloop}
             content = self.nodelist.render(context)
@@ -212,13 +228,13 @@
         self.var_name = var_name
 
     def render(self, context):
-        obj_list = self.target.resolve(context)
-        if obj_list == '': # target_var wasn't found in context; fail silently
+        obj_list = self.target.resolve(context, True)
+        if obj_list == None: # target_var wasn't found in context; fail silently
             context[self.var_name] = []
             return ''
         output = [] # list of dictionaries in the format {'grouper': 'key', 'list': [list of contents]}
         for obj in obj_list:
-            grouper = self.expression.resolve(Context({'var': obj}))
+            grouper = self.expression.resolve(Context({'var': obj}), True)
             # TODO: Is this a sensible way to determine equality?
             if output and repr(output[-1]['grouper']) == repr(grouper):
                 output[-1]['list'].append(obj)
@@ -251,7 +267,7 @@
             output = ''
         if self.parsed:
             try:
-                t = Template(output)
+                t = Template(output, name=self.filepath)
                 return t.render(context)
             except TemplateSyntaxError, e:
                 if settings.DEBUG:
@@ -289,6 +305,8 @@
                'closevariable': VARIABLE_TAG_END,
                'openbrace': SINGLE_BRACE_START,
                'closebrace': SINGLE_BRACE_END,
+               'opencomment': COMMENT_TAG_START,
+               'closecomment': COMMENT_TAG_END,
                }
 
     def __init__(self, tagtype):
@@ -297,6 +315,25 @@
     def render(self, context):
         return self.mapping.get(self.tagtype, '')
 
+class URLNode(Node):
+    def __init__(self, view_name, args, kwargs):
+        self.view_name = view_name
+        self.args = args
+        self.kwargs = kwargs
+      
+    def render(self, context):
+        from opal.core.urlresolvers import reverse, NoReverseMatch
+        args = [arg.resolve(context) for arg in self.args]
+        kwargs = dict([(k, v.resolve(context)) for k, v in self.kwargs.items()])
+        try:
+            return reverse(self.view_name, args=args, kwargs=kwargs)
+        except NoReverseMatch:
+            try:
+                project_name = settings.SETTINGS_MODULE.split('.')[0]
+                return reverse(project_name + '.' + self.view_name, args=args, kwargs=kwargs)
+            except NoReverseMatch:
+                return ''
+
 class WidthRatioNode(Node):
     def __init__(self, val_expr, max_expr, max_width):
         self.val_expr = val_expr
@@ -385,7 +422,7 @@
             raise TemplateSyntaxError("Second 'cycle' argument must be 'as'")
         cyclevars = [v for v in args[1].split(",") if v]    # split and kill blanks
         name = args[3]
-        node = CycleNode(cyclevars)
+        node = CycleNode(cyclevars, name)
 
         if not hasattr(parser, '_namedCycleNodes'):
             parser._namedCycleNodes = {}
@@ -398,6 +435,15 @@
 cycle = register.tag(cycle)
 
 def debug(parser, token):
+    """
+    Output a whole load of debugging information, including the current context and imported modules.
+
+    Sample usage::
+
+        <pre>
+            {% debug %}
+        </pre>
+    """
     return DebugNode()
 debug = register.tag(debug)
 
@@ -501,21 +547,6 @@
 do_for = register.tag("for", do_for)
 
 def do_ifequal(parser, token, negate):
-    """
-    Output the contents of the block if the two arguments equal/don't equal each other.
-
-    Examples::
-
-        {% ifequal user.id comment.user_id %}
-            ...
-        {% endifequal %}
-
-        {% ifnotequal user.id comment.user_id %}
-            ...
-        {% else %}
-            ...
-        {% endifnotequal %}
-    """
     bits = list(token.split_contents())
     if len(bits) != 3:
         raise TemplateSyntaxError, "%r takes two arguments" % bits[0]
@@ -531,11 +562,27 @@
 
 #@register.tag
 def ifequal(parser, token):
+    """
+    Output the contents of the block if the two arguments equal each other.
+
+    Examples::
+
+        {% ifequal user.id comment.user_id %}
+            ...
+        {% endifequal %}
+
+        {% ifnotequal user.id comment.user_id %}
+            ...
+        {% else %}
+            ...
+        {% endifnotequal %}
+    """
     return do_ifequal(parser, token, False)
 ifequal = register.tag(ifequal)
 
 #@register.tag
 def ifnotequal(parser, token):
+    """Output the contents of the block if the two arguments are not equal. See ifequal."""
     return do_ifequal(parser, token, True)
 ifnotequal = register.tag(ifnotequal)
 
@@ -626,23 +673,34 @@
     """
     Check if a value has changed from the last iteration of a loop.
 
-    The 'ifchanged' block tag is used within a loop. It checks its own rendered
-    contents against its previous state and only displays its content if the
-    value has changed::
+    The 'ifchanged' block tag is used within a loop. It has two possible uses.
 
-        <h1>Archive for {{ year }}</h1>
+    1. Checks its own rendered contents against its previous state and only
+       displays the content if it has changed. For example, this displays a list of
+       days, only displaying the month if it changes::
 
-        {% for date in days %}
-        {% ifchanged %}<h3>{{ date|date:"F" }}</h3>{% endifchanged %}
-        <a href="{{ date|date:"M/d"|lower }}/">{{ date|date:"j" }}</a>
-        {% endfor %}
+            <h1>Archive for {{ year }}</h1>
+
+            {% for date in days %}
+                {% ifchanged %}<h3>{{ date|date:"F" }}</h3>{% endifchanged %}
+                <a href="{{ date|date:"M/d"|lower }}/">{{ date|date:"j" }}</a>
+            {% endfor %}
+
+    2. If given a variable, check whether that variable has changed. For example, the
+       following shows the date every time it changes, but only shows the hour if both
+       the hour and the date have changed::
+
+            {% for date in days %}
+                {% ifchanged date.date %} {{ date.date }} {% endifchanged %}
+                {% ifchanged date.hour date.date %}
+                    {{ date.hour }}
+                {% endifchanged %}
+            {% endfor %}
     """
     bits = token.contents.split()
-    if len(bits) != 1:
-        raise TemplateSyntaxError, "'ifchanged' tag takes no arguments"
     nodelist = parser.parse(('endifchanged',))
     parser.delete_first_token()
-    return IfChangedNode(nodelist)
+    return IfChangedNode(nodelist, *bits[1:])
 ifchanged = register.tag(ifchanged)
 
 #@register.tag
@@ -825,6 +883,8 @@
         ``closevariable``   ``}}``
         ``openbrace``       ``{``
         ``closebrace``      ``}``
+        ``opencomment``     ``{#``
+        ``closecomment``    ``#}``
         ==================  =======
     """
     bits = token.contents.split()
@@ -837,6 +897,51 @@
     return TemplateTagNode(tag)
 templatetag = register.tag(templatetag)
 
+def url(parser, token):
+    """
+    Returns an absolute URL matching the given view with its parameters. 
+    
+    This is a way to define links that aren't tied to a particular URL configuration::
+    
+        {% url path.to.some_view arg1,arg2,name1=value1 %}
+    
+    The first argument is a path to a view. It can be an absolute Python path
+    or just ``app_name.view_name`` without the project name if the view is
+    located inside the project.  Other arguments are comma-separated values
+    that will be filled in place of positional and keyword arguments in the
+    URL. All arguments for the URL should be present.
+
+    For example if you have a view ``app_name.client`` taking client's id and
+    the corresponding line in a URLconf looks like this::
+    
+        ('^client/(\d+)/$', 'app_name.client')
+    
+    and this app's URLconf is included into the project's URLconf under some
+    path::
+    
+        ('^clients/', include('project_name.app_name.urls'))
+    
+    then in a template you can create a link for a certain client like this::
+    
+        {% url app_name.client client.id %}
+    
+    The URL will look like ``/clients/client/123/``.
+    """
+    bits = token.contents.split(' ', 2)
+    if len(bits) < 2:
+        raise TemplateSyntaxError, "'%s' takes at least one argument (path to a view)" % bits[0]
+    args = []
+    kwargs = {}
+    if len(bits) > 2:
+        for arg in bits[2].split(','):
+            if '=' in arg:
+                k, v = arg.split('=', 1)
+                kwargs[k] = parser.compile_filter(v)
+            else:
+                args.append(parser.compile_filter(arg))
+    return URLNode(bits[1], args, kwargs)
+url = register.tag(url)
+
 #@register.tag
 def widthratio(parser, token):
     """

Modified: cs/pythia/trunk/opal/template/loader.py
===================================================================
--- cs/pythia/trunk/opal/template/loader.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/template/loader.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -76,14 +76,16 @@
     Returns a compiled Template object for the given template name,
     handling template inheritance recursively.
     """
-    return get_template_from_string(*find_template_source(template_name))
+    source, origin = find_template_source(template_name)
+    template = get_template_from_string(source, origin, template_name)
+    return template
 
-def get_template_from_string(source, origin=None):
+def get_template_from_string(source, origin=None, name=None):
     """
     Returns a compiled Template object for the given template code,
     handling template inheritance recursively.
     """
-    return Template(source, origin)
+    return Template(source, origin, name)
 
 def render_to_string(template_name, dictionary=None, context_instance=None):
     """

Modified: cs/pythia/trunk/opal/template/loader_tags.py
===================================================================
--- cs/pythia/trunk/opal/template/loader_tags.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/template/loader_tags.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -51,13 +51,13 @@
                 error_msg += " Got this from the %r variable." % self.parent_name_expr #TODO nice repr.
             raise TemplateSyntaxError, error_msg
         if hasattr(parent, 'render'):
-            return parent
+            return parent # parent is a Template object
         try:
             source, origin = find_template_source(parent, self.template_dirs)
         except TemplateDoesNotExist:
             raise TemplateSyntaxError, "Template %r cannot be extended, because it doesn't exist" % parent
         else:
-            return get_template_from_string(source, origin)
+            return get_template_from_string(source, origin, parent)
 
     def render(self, context):
         compiled_parent = self.get_parent(context)
@@ -129,7 +129,7 @@
         parser.__loaded_blocks.append(block_name)
     except AttributeError: # parser.__loaded_blocks isn't a list yet
         parser.__loaded_blocks = [block_name]
-    nodelist = parser.parse(('endblock',))
+    nodelist = parser.parse(('endblock', 'endblock %s' % block_name))
     parser.delete_first_token()
     return BlockNode(block_name, nodelist)
 

Modified: cs/pythia/trunk/opal/template/loaders/app_directories.py
===================================================================
--- cs/pythia/trunk/opal/template/loaders/app_directories.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/template/loaders/app_directories.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -15,9 +15,9 @@
         m, a = app[:i], app[i+1:]
     try:
         if a is None:
-            mod = __import__(m, '', '', [])
+            mod = __import__(m, {}, {}, [])
         else:
-            mod = getattr(__import__(m, '', '', [a]), a)
+            mod = getattr(__import__(m, {}, {}, [a]), a)
     except ImportError, e:
         raise ImproperlyConfigured, 'ImportError %s: %s' % (app, e.args[0])
     template_dir = os.path.join(os.path.dirname(mod.__file__), 'templates')

Modified: cs/pythia/trunk/opal/template/loaders/filesystem.py
===================================================================
--- cs/pythia/trunk/opal/template/loaders/filesystem.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/template/loaders/filesystem.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -17,7 +17,7 @@
             return (open(filepath).read(), filepath)
         except IOError:
             tried.append(filepath)
-    if template_dirs:
+    if tried:
         error_msg = "Tried %s" % tried
     else:
         error_msg = "Your TEMPLATE_DIRS setting is empty. Change it to point to at least one template directory."

Modified: cs/pythia/trunk/opal/templatetags/__init__.py
===================================================================
--- cs/pythia/trunk/opal/templatetags/__init__.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/templatetags/__init__.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -2,6 +2,6 @@
 
 for a in settings.INSTALLED_APPS:
     try:
-        __path__.extend(__import__(a + '.templatetags', '', '', ['']).__path__)
+        __path__.extend(__import__(a + '.templatetags', {}, {}, ['']).__path__)
     except ImportError:
         pass

Modified: cs/pythia/trunk/opal/utils/datastructures.py
===================================================================
--- cs/pythia/trunk/opal/utils/datastructures.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/utils/datastructures.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -14,7 +14,13 @@
                 pass
         raise KeyError
 
-    def get(self, key, default):
+    def __contains__(self, key):
+        return self.has_key(key)
+        
+    def __copy__(self): 
+        return self.__class__(*self.dicts) 
+
+    def get(self, key, default=None):
         try:
             return self[key]
         except KeyError:
@@ -39,6 +45,10 @@
             if dict.has_key(key):
                 return True
         return False
+        
+    def copy(self): 
+        """ returns a copy of this object""" 
+        return self.__copy__()
 
 class SortedDict(dict):
     "A dictionary that keeps its keys in the order in which they're inserted."
@@ -67,7 +77,7 @@
         return self.keyOrder[:]
 
     def values(self):
-        return [dict.__getitem__(self,k) for k in self.keyOrder]
+        return [dict.__getitem__(self, k) for k in self.keyOrder]
 
     def update(self, dict):
         for k, v in dict.items():
@@ -78,6 +88,17 @@
             self.keyOrder.append(key)
         return dict.setdefault(self, key, default)
 
+    def value_for_index(self, index):
+        "Returns the value of the item at the given zero-based index."
+        return self[self.keyOrder[index]]
+
+    def copy(self):
+        "Returns a copy of this object."
+        # This way of initializing the copy means it works for subclasses, too.
+        obj = self.__class__(self)
+        obj.keyOrder = self.keyOrder
+        return obj
+
 class MultiValueDictKeyError(KeyError):
     pass
 
@@ -187,17 +208,23 @@
         "Returns a copy of this object."
         return self.__deepcopy__()
 
-    def update(self, other_dict):
-        "update() extends rather than replaces existing key lists."
-        if isinstance(other_dict, MultiValueDict):
-            for key, value_list in other_dict.lists():
-                self.setlistdefault(key, []).extend(value_list)
-        else:
-            try:
-                for key, value in other_dict.items():
-                    self.setlistdefault(key, []).append(value)
-            except TypeError:
-                raise ValueError, "MultiValueDict.update() takes either a MultiValueDict or dictionary"
+    def update(self, *args, **kwargs):
+        "update() extends rather than replaces existing key lists. Also accepts keyword args."
+        if len(args) > 1:
+            raise TypeError, "update expected at most 1 arguments, got %d", len(args)
+        if args:
+            other_dict = args[0]
+            if isinstance(other_dict, MultiValueDict):
+                for key, value_list in other_dict.lists():
+                    self.setlistdefault(key, []).extend(value_list)
+            else:
+                try:
+                    for key, value in other_dict.items():
+                        self.setlistdefault(key, []).append(value)
+                except TypeError:
+                    raise ValueError, "MultiValueDict.update() takes either a MultiValueDict or dictionary"
+        for key, value in kwargs.iteritems():
+            self.setlistdefault(key, []).append(value)
 
 class DotExpandedDict(dict):
     """

Modified: cs/pythia/trunk/opal/utils/dateformat.py
===================================================================
--- cs/pythia/trunk/opal/utils/dateformat.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/utils/dateformat.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -11,12 +11,13 @@
 >>>
 """
 
-from opal.utils.dates import MONTHS, MONTHS_AP, WEEKDAYS
+from opal.utils.dates import MONTHS, MONTHS_3, MONTHS_AP, WEEKDAYS
 from opal.utils.tzinfo import LocalTimezone
+from opal.utils.translation import gettext as _
 from calendar import isleap, monthrange
 import re, time
 
-re_formatchars = re.compile(r'(?<!\\)([aABdDfFgGhHiIjlLmMnNOPrsStTUwWyYzZ])')
+re_formatchars = re.compile(r'(?<!\\)([aAbBdDfFgGhHiIjlLmMnNOPrsStTUwWyYzZ])')
 re_escaped = re.compile(r'\\(.)')
 
 class Formatter(object):
@@ -36,14 +37,14 @@
     def a(self):
         "'a.m.' or 'p.m.'"
         if self.data.hour > 11:
-            return 'p.m.'
-        return 'a.m.'
+            return _('p.m.')
+        return _('a.m.')
 
     def A(self):
         "'AM' or 'PM'"
         if self.data.hour > 11:
-            return 'PM'
-        return 'AM'
+            return _('PM')
+        return _('AM')
 
     def B(self):
         "Swatch Internet time"
@@ -91,9 +92,9 @@
         Proprietary extension.
         """
         if self.data.minute == 0 and self.data.hour == 0:
-            return 'midnight'
+            return _('midnight')
         if self.data.minute == 0 and self.data.hour == 12:
-            return 'noon'
+            return _('noon')
         return '%s %s' % (self.f(), self.a())
 
     def s(self):
@@ -110,6 +111,10 @@
         if hasattr(self.data, 'hour') and not self.timezone:
             self.timezone = LocalTimezone(dt)
 
+    def b(self):
+        "Month, textual, 3 letters, lowercase; e.g. 'jan'"
+        return MONTHS_3[self.data.month]
+
     def d(self):
         "Day of the month, 2 digits with leading zeros; i.e. '01' to '31'"
         return '%02d' % self.data.day
@@ -147,7 +152,7 @@
 
     def M(self):
         "Month, textual, 3 letters; e.g. 'Jan'"
-        return MONTHS[self.data.month][0:3]
+        return MONTHS_3[self.data.month].title()
 
     def n(self):
         "Month without leading zeros; i.e. '1' to '12'"

Modified: cs/pythia/trunk/opal/utils/feedgenerator.py
===================================================================
--- cs/pythia/trunk/opal/utils/feedgenerator.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/utils/feedgenerator.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -40,7 +40,7 @@
     "Base class for all syndication feeds. Subclasses should provide write()"
     def __init__(self, title, link, description, language=None, author_email=None,
             author_name=None, author_link=None, subtitle=None, categories=None,
-            feed_url=None):
+            feed_url=None, feed_copyright=None):
         self.feed = {
             'title': title,
             'link': link,
@@ -52,12 +52,13 @@
             'subtitle': subtitle,
             'categories': categories or (),
             'feed_url': feed_url,
+            'feed_copyright': feed_copyright,
         }
         self.items = []
 
     def add_item(self, title, link, description, author_email=None,
         author_name=None, author_link=None, pubdate=None, comments=None,
-        unique_id=None, enclosure=None, categories=()):
+        unique_id=None, enclosure=None, categories=(), item_copyright=None):
         """
         Adds an item to the feed. All args are expected to be Python Unicode
         objects except pubdate, which is a datetime.datetime object, and
@@ -75,6 +76,7 @@
             'unique_id': unique_id,
             'enclosure': enclosure,
             'categories': categories or (),
+            'item_copyright': item_copyright,
         })
 
     def num_items(self):
@@ -128,6 +130,8 @@
             handler.addQuickElement(u"language", self.feed['language'])
         for cat in self.feed['categories']:
             handler.addQuickElement(u"category", cat)
+        if self.feed['feed_copyright'] is not None:
+            handler.addQuickElement(u"copyright", self.feed['feed_copyright'])
         self.write_items(handler)
         self.endChannelElement(handler)
         handler.endElement(u"rss")
@@ -212,6 +216,8 @@
             handler.addQuickElement(u"subtitle", self.feed['subtitle'])
         for cat in self.feed['categories']:
             handler.addQuickElement(u"category", "", {u"term": cat})
+        if self.feed['feed_copyright'] is not None:
+            handler.addQuickElement(u"rights", self.feed['feed_copyright'])
         self.write_items(handler)
         handler.endElement(u"feed")
 
@@ -252,10 +258,14 @@
                      u"length": item['enclosure'].length,
                      u"type": item['enclosure'].mime_type})
 
-            # Categories:
+            # Categories.
             for cat in item['categories']:
                 handler.addQuickElement(u"category", u"", {u"term": cat})
 
+            # Rights.
+            if item['item_copyright'] is not None:
+                handler.addQuickElement(u"rights", item['item_copyright'])
+
             handler.endElement(u"entry")
 
 # This isolates the decision of what the system default is, so calling code can

Modified: cs/pythia/trunk/opal/utils/functional.py
===================================================================
--- cs/pythia/trunk/opal/utils/functional.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/utils/functional.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -1,6 +1,6 @@
-def curry(*args, **kwargs):
+def curry(_curried_func, *args, **kwargs):
     def _curried(*moreargs, **morekwargs):
-        return args[0](*(args[1:]+moreargs), **dict(kwargs.items() + morekwargs.items()))
+        return _curried_func(*(args+moreargs), **dict(kwargs, **morekwargs))
     return _curried
 
 class Promise:

Copied: cs/pythia/trunk/opal/utils/itercompat.py (from rev 7736, vendor/django/current/django/utils/itercompat.py)

Modified: cs/pythia/trunk/opal/utils/simplejson/LICENSE.txt
===================================================================
--- cs/pythia/trunk/opal/utils/simplejson/LICENSE.txt	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/utils/simplejson/LICENSE.txt	2007-07-24 00:33:05 UTC (rev 7737)
@@ -1,4 +1,4 @@
-simplejson 1.3
+simplejson 1.5
 Copyright (c) 2006 Bob Ippolito
 
 Permission is hereby granted, free of charge, to any person obtaining a copy of

Modified: cs/pythia/trunk/opal/utils/simplejson/__init__.py
===================================================================
--- cs/pythia/trunk/opal/utils/simplejson/__init__.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/utils/simplejson/__init__.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -27,6 +27,21 @@
     >>> io.getvalue()
     '["streaming API"]'
 
+Compact encoding::
+
+    >>> import simplejson
+    >>> simplejson.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
+    '[1,2,3,{"4":5,"6":7}]'
+
+Pretty printing::
+
+    >>> import simplejson
+    >>> print simplejson.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
+    {
+        "4": 5, 
+        "6": 7
+    }
+
 Decoding JSON::
     
     >>> import simplejson
@@ -68,10 +83,10 @@
     ['[', '2.0', ', ', '1.0', ']']
     
 
-Note that the JSON produced by this module is a subset of YAML,
-so it may be used as a serializer for that as well.
+Note that the JSON produced by this module's default settings
+is a subset of YAML, so it may be used as a serializer for that as well.
 """
-__version__ = '1.3'
+__version__ = '1.5'
 __all__ = [
     'dump', 'dumps', 'load', 'loads',
     'JSONDecoder', 'JSONEncoder',
@@ -81,7 +96,7 @@
 from opal.utils.simplejson.encoder import JSONEncoder
 
 def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
-        allow_nan=True, cls=None, **kw):
+        allow_nan=True, cls=None, indent=None, **kw):
     """
     Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
     ``.write()``-supporting file-like object).
@@ -105,6 +120,10 @@
     in strict compliance of the JSON specification, instead of using the
     JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
 
+    If ``indent`` is a non-negative integer, then JSON array elements and object
+    members will be pretty-printed with that indent level.  An indent level
+    of 0 will only insert newlines.  ``None`` is the most compact representation.
+
     To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
     ``.default()`` method to serialize additional types), specify it with
     the ``cls`` kwarg.
@@ -112,7 +131,7 @@
     if cls is None:
         cls = JSONEncoder
     iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
-        check_circular=check_circular, allow_nan=allow_nan,
+        check_circular=check_circular, allow_nan=allow_nan, indent=indent,
         **kw).iterencode(obj)
     # could accelerate with writelines in some versions of Python, at
     # a debuggability cost
@@ -120,7 +139,7 @@
         fp.write(chunk)
 
 def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
-        allow_nan=True, cls=None, **kw):
+        allow_nan=True, cls=None, indent=None, separators=None, **kw):
     """
     Serialize ``obj`` to a JSON formatted ``str``.
 
@@ -141,14 +160,26 @@
     strict compliance of the JSON specification, instead of using the
     JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
 
+    If ``indent`` is a non-negative integer, then JSON array elements and
+    object members will be pretty-printed with that indent level.  An indent
+    level of 0 will only insert newlines.  ``None`` is the most compact
+    representation.
+
+    If ``separators`` is an ``(item_separator, dict_separator)`` tuple
+    then it will be used instead of the default ``(', ', ': ')`` separators.
+    ``(',', ':')`` is the most compact JSON representation.
+
     To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
     ``.default()`` method to serialize additional types), specify it with
     the ``cls`` kwarg.
     """
     if cls is None:
         cls = JSONEncoder
-    return cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
-        check_circular=check_circular, allow_nan=allow_nan, **kw).encode(obj)
+    return cls(
+        skipkeys=skipkeys, ensure_ascii=ensure_ascii,
+        check_circular=check_circular, allow_nan=allow_nan, indent=indent,
+        separators=separators,
+        **kw).encode(obj)
 
 def load(fp, encoding=None, cls=None, object_hook=None, **kw):
     """

Modified: cs/pythia/trunk/opal/utils/simplejson/decoder.py
===================================================================
--- cs/pythia/trunk/opal/utils/simplejson/decoder.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/utils/simplejson/decoder.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -127,6 +127,7 @@
         raise ValueError(errmsg("Expecting property name", s, end))
     end += 1
     encoding = getattr(context, 'encoding', None)
+    iterscan = JSONScanner.iterscan
     while True:
         key, end = scanstring(s, end, encoding)
         end = _w(s, end).end()
@@ -134,7 +135,7 @@
             raise ValueError(errmsg("Expecting : delimiter", s, end))
         end = _w(s, end + 1).end()
         try:
-            value, end = JSONScanner.iterscan(s, idx=end).next()
+            value, end = iterscan(s, idx=end, context=context).next()
         except StopIteration:
             raise ValueError(errmsg("Expecting object", s, end))
         pairs[key] = value
@@ -164,9 +165,10 @@
     nextchar = s[end:end + 1]
     if nextchar == ']':
         return values, end + 1
+    iterscan = JSONScanner.iterscan
     while True:
         try:
-            value, end = JSONScanner.iterscan(s, idx=end).next()
+            value, end = iterscan(s, idx=end, context=context).next()
         except StopIteration:
             raise ValueError(errmsg("Expecting object", s, end))
         values.append(value)

Modified: cs/pythia/trunk/opal/utils/simplejson/encoder.py
===================================================================
--- cs/pythia/trunk/opal/utils/simplejson/encoder.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/utils/simplejson/encoder.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -3,11 +3,11 @@
 """
 import re
 
-# this should match any kind of infinity
-INFCHARS = re.compile(r'[infINF]')
 ESCAPE = re.compile(r'[\x00-\x19\\"\b\f\n\r\t]')
-ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
+ESCAPE_ASCII = re.compile(r'([\\"/]|[^\ -~])')
 ESCAPE_DCT = {
+    # escape all forward slashes to prevent </script> attack
+    '/': '\\/',
     '\\': '\\\\',
     '"': '\\"',
     '\b': '\\b',
@@ -16,32 +16,32 @@
     '\r': '\\r',
     '\t': '\\t',
 }
-for i in range(20):
+for i in range(0x20):
     ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
 
+# assume this produces an infinity on all machines (probably not guaranteed)
+INFINITY = float('1e66666')
+
 def floatstr(o, allow_nan=True):
-    s = str(o)
-    # If the first non-sign is a digit then it's not a special value
-    if (o < 0.0 and s[1].isdigit()) or s[0].isdigit():
-        return s
-    elif not allow_nan:
+    # Check for specials.  Note that this type of test is processor- and/or
+    # platform-specific, so do tests which don't depend on the internals.
+
+    if o != o:
+        text = 'NaN'
+    elif o == INFINITY:
+        text = 'Infinity'
+    elif o == -INFINITY:
+        text = '-Infinity'
+    else:
+        return str(o)
+
+    if not allow_nan:
         raise ValueError("Out of range float values are not JSON compliant: %r"
             % (o,))
-    # These are the string representations on the platforms I've tried
-    if s == 'nan':
-        return 'NaN'
-    if s == 'inf':
-        return 'Infinity'
-    if s == '-inf':
-        return '-Infinity'
-    # NaN should either be inequal to itself, or equal to everything
-    if o != o or o == 0.0:
-        return 'NaN'
-    # Last ditch effort, assume inf
-    if o < 0:
-        return '-Infinity'
-    return 'Infinity'
 
+    return text
+
+
 def encode_basestring(s):
     """
     Return a JSON representation of a Python string
@@ -90,8 +90,11 @@
     implementation (to raise ``TypeError``).
     """
     __all__ = ['__init__', 'default', 'encode', 'iterencode']
+    item_separator = ', '
+    key_separator = ': '
     def __init__(self, skipkeys=False, ensure_ascii=True,
-            check_circular=True, allow_nan=True, sort_keys=False):
+            check_circular=True, allow_nan=True, sort_keys=False,
+            indent=None, separators=None):
         """
         Constructor for JSONEncoder, with sensible defaults.
 
@@ -116,6 +119,15 @@
         If sort_keys is True, then the output of dictionaries will be
         sorted by key; this is useful for regression tests to ensure
         that JSON serializations can be compared on a day-to-day basis.
+
+        If indent is a non-negative integer, then JSON array
+        elements and object members will be pretty-printed with that
+        indent level.  An indent level of 0 will only insert newlines.
+        None is the most compact representation.
+
+        If specified, separators should be a (item_separator, key_separator)
+        tuple. The default is (', ', ': '). To get the most compact JSON
+        representation you should specify (',', ':') to eliminate whitespace.
         """
 
         self.skipkeys = skipkeys
@@ -123,7 +135,14 @@
         self.check_circular = check_circular
         self.allow_nan = allow_nan
         self.sort_keys = sort_keys
+        self.indent = indent
+        self.current_indent_level = 0
+        if separators is not None:
+            self.item_separator, self.key_separator = separators
 
+    def _newline_indent(self):
+        return '\n' + (' ' * (self.indent * self.current_indent_level))
+
     def _iterencode_list(self, lst, markers=None):
         if not lst:
             yield '[]'
@@ -134,14 +153,25 @@
                 raise ValueError("Circular reference detected")
             markers[markerid] = lst
         yield '['
+        if self.indent is not None:
+            self.current_indent_level += 1
+            newline_indent = self._newline_indent()
+            separator = self.item_separator + newline_indent
+            yield newline_indent
+        else:
+            newline_indent = None
+            separator = self.item_separator
         first = True
         for value in lst:
             if first:
                 first = False
             else:
-                yield ', '
+                yield separator
             for chunk in self._iterencode(value, markers):
                 yield chunk
+        if newline_indent is not None:
+            self.current_indent_level -= 1
+            yield self._newline_indent()
         yield ']'
         if markers is not None:
             del markers[markerid]
@@ -156,6 +186,15 @@
                 raise ValueError("Circular reference detected")
             markers[markerid] = dct
         yield '{'
+        key_separator = self.key_separator
+        if self.indent is not None:
+            self.current_indent_level += 1
+            newline_indent = self._newline_indent()
+            item_separator = self.item_separator + newline_indent
+            yield newline_indent
+        else:
+            newline_indent = None
+            item_separator = self.item_separator
         first = True
         if self.ensure_ascii:
             encoder = encode_basestring_ascii
@@ -165,7 +204,7 @@
         if self.sort_keys:
             keys = dct.keys()
             keys.sort()
-            items = [(k,dct[k]) for k in keys]
+            items = [(k, dct[k]) for k in keys]
         else:
             items = dct.iteritems()
         for key, value in items:
@@ -190,11 +229,14 @@
             if first:
                 first = False
             else:
-                yield ', '
+                yield item_separator
             yield encoder(key)
-            yield ': '
+            yield key_separator
             for chunk in self._iterencode(value, markers):
                 yield chunk
+        if newline_indent is not None:
+            self.current_indent_level -= 1
+            yield self._newline_indent()
         yield '}'
         if markers is not None:
             del markers[markerid]

Copied: cs/pythia/trunk/opal/utils/simplejson/jsonfilter.py (from rev 7736, vendor/django/current/django/utils/simplejson/jsonfilter.py)

Modified: cs/pythia/trunk/opal/utils/simplejson/scanner.py
===================================================================
--- cs/pythia/trunk/opal/utils/simplejson/scanner.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/utils/simplejson/scanner.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -3,7 +3,7 @@
 """
 import sre_parse, sre_compile, sre_constants
 from sre_constants import BRANCH, SUBPATTERN
-from sre import VERBOSE, MULTILINE, DOTALL
+from re import VERBOSE, MULTILINE, DOTALL
 import re
 
 __all__ = ['Scanner', 'pattern']

Modified: cs/pythia/trunk/opal/utils/text.py
===================================================================
--- cs/pythia/trunk/opal/utils/text.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/utils/text.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -8,17 +8,28 @@
 def wrap(text, width):
     """
     A word-wrap function that preserves existing line breaks and most spaces in
-    the text. Expects that existing line breaks are posix newlines (\n).
-    See http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/148061
+    the text. Expects that existing line breaks are posix newlines.
     """
-    return reduce(lambda line, word, width=width: '%s%s%s' %
-                  (line,
-                   ' \n'[(len(line[line.rfind('\n')+1:])
-                         + len(word.split('\n',1)[0]
-                              ) >= width)],
-                   word),
-                  text.split(' ')
-                 )
+    def _generator():
+        it = iter(text.split(' '))
+        word = it.next()
+        yield word
+        pos = len(word) - word.rfind('\n') - 1
+        for word in it:
+            if "\n" in word:
+                lines = word.split('\n')
+            else:
+                lines = (word,)
+            pos += len(lines[0]) + 1
+            if pos > width:
+                yield '\n'
+                pos = len(lines[-1])
+            else:
+                yield ' '
+                if len(lines) > 1:
+                    pos = len(lines[-1])
+            yield word
+    return "".join(_generator())
 
 def truncate_words(s, num):
     "Truncates a string after a certain number of words."
@@ -30,6 +41,66 @@
             words.append('...')
     return ' '.join(words)
 
+def truncate_html_words(s, num):
+    """
+    Truncates html to a certain number of words (not counting tags and comments).
+    Closes opened tags if they were correctly closed in the given html.
+    """
+    length = int(num)
+    if length <= 0:
+        return ''
+    html4_singlets = ('br', 'col', 'link', 'base', 'img', 'param', 'area', 'hr', 'input')
+    # Set up regular expressions
+    re_words = re.compile(r'&.*?;|<.*?>|([A-Za-z0-9][\w-]*)')
+    re_tag = re.compile(r'<(/)?([^ ]+?)(?: (/)| .*?)?>')
+    # Count non-HTML words and keep note of open tags
+    pos = 0
+    ellipsis_pos = 0
+    words = 0
+    open_tags = []
+    while words <= length:
+        m = re_words.search(s, pos)
+        if not m:
+            # Checked through whole string
+            break
+        pos = m.end(0)
+        if m.group(1):
+            # It's an actual non-HTML word
+            words += 1
+            if words == length:
+                ellipsis_pos = pos
+            continue
+        # Check for tag
+        tag = re_tag.match(m.group(0))
+        if not tag or ellipsis_pos:
+            # Don't worry about non tags or tags after our truncate point
+            continue
+        closing_tag, tagname, self_closing = tag.groups()
+        tagname = tagname.lower()  # Element names are always case-insensitive
+        if self_closing or tagname in html4_singlets:
+            pass
+        elif closing_tag:
+            # Check for match in open tags list
+            try:
+                i = open_tags.index(tagname)
+            except ValueError:
+                pass
+            else:
+                # SGML: An end tag closes, back to the matching start tag, all unclosed intervening start tags with omitted end tags
+                open_tags = open_tags[i+1:]
+        else:
+            # Add it to the start of the open tags list
+            open_tags.insert(0, tagname)
+    if words <= length:
+        # Don't try to close tags if we don't need to truncate
+        return s
+    out = s[:ellipsis_pos] + ' ...'
+    # Close any tags still open
+    for tag in open_tags:
+        out += '</%s>' % tag
+    # Return string
+    return out
+
 def get_valid_filename(s):
     """
     Returns the given string converted to a string that can be used for a clean
@@ -94,8 +165,9 @@
     return zbuf.getvalue()
 
 ustring_re = re.compile(u"([\u0080-\uffff])")
-def javascript_quote(s):
 
+def javascript_quote(s, quote_double_quotes=False):
+
     def fix(match):
         return r"\u%04x" % ord(match.group(1))
 
@@ -104,9 +176,12 @@
     elif type(s) != unicode:
         raise TypeError, s
     s = s.replace('\\', '\\\\')
+    s = s.replace('\r', '\\r')
     s = s.replace('\n', '\\n')
     s = s.replace('\t', '\\t')
     s = s.replace("'", "\\'")
+    if quote_double_quotes:
+        s = s.replace('"', '&quot;')
     return str(ustring_re.sub(fix, s))
 
 smart_split_re = re.compile('("(?:[^"\\\\]*(?:\\\\.[^"\\\\]*)*)"|\'(?:[^\'\\\\]*(?:\\\\.[^\'\\\\]*)*)\'|[^\\s]+)')

Modified: cs/pythia/trunk/opal/views/debug.py
===================================================================
--- cs/pythia/trunk/opal/views/debug.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/views/debug.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -4,7 +4,7 @@
 from opal.http import HttpResponseServerError, HttpResponseNotFound
 import os, re
 
-HIDDEN_SETTINGS = re.compile('SECRET|PASSWORD')
+HIDDEN_SETTINGS = re.compile('SECRET|PASSWORD|PROFANITIES_LIST')
 
 def linebreak_iter(template_source):
     yield 0
@@ -75,7 +75,7 @@
         loader_debug_info = []
         for loader in template_source_loaders:
             try:
-                source_list_func = getattr(__import__(loader.__module__, '', '', ['get_template_sources']), 'get_template_sources')
+                source_list_func = getattr(__import__(loader.__module__, {}, {}, ['get_template_sources']), 'get_template_sources')
                 # NOTE: This assumes exc_value is the name of the template that
                 # the loader attempted to load.
                 template_list = [{'name': t, 'exists': os.path.exists(t)} \
@@ -115,7 +115,7 @@
             'function': '?',
             'lineno': '?',
         }]
-    t = Template(TECHNICAL_500_TEMPLATE)
+    t = Template(TECHNICAL_500_TEMPLATE, name='Technical 500 template')
     c = Context({
         'exception_type': exc_type.__name__,
         'exception_value': exc_value,
@@ -141,7 +141,7 @@
             # tried exists but is an empty list. The URLconf must've been empty.
             return empty_urlconf(request)
 
-    t = Template(TECHNICAL_404_TEMPLATE)
+    t = Template(TECHNICAL_404_TEMPLATE, name='Technical 404 template')
     c = Context({
         'root_urlconf': settings.ROOT_URLCONF,
         'urlpatterns': tried,
@@ -154,7 +154,7 @@
 
 def empty_urlconf(request):
     "Create an empty URLconf 404 error response."
-    t = Template(EMPTY_URLCONF_TEMPLATE)
+    t = Template(EMPTY_URLCONF_TEMPLATE, name='Empty URLConf template')
     c = Context({
         'project_name': settings.name
     })
@@ -189,7 +189,7 @@
 <head>
   <meta http-equiv="content-type" content="text/html; charset=utf-8" />
   <meta name="robots" content="NONE,NOARCHIVE" />
-  <title>{{ exception_type }} at {{ request.path }}</title>
+  <title>{{ exception_type }} at {{ request.path|escape }}</title>
   <style type="text/css">
     html * { padding:0; margin:0; }
     body * { padding:10px 20px; }
@@ -292,7 +292,7 @@
 <body>
 
 <div id="summary">
-  <h1>{{ exception_type }} at {{ request.path }}</h1>
+  <h1>{{ exception_type }} at {{ request.path|escape }}</h1>
   <h2>{{ exception_value|escape }}</h2>
   <table class="meta">
     <tr>
@@ -301,7 +301,7 @@
     </tr>
     <tr>
       <th>Request URL:</th>
-      <td>{{ request_protocol }}://{{ request.META.HTTP_HOST }}{{ request.path }}</td>
+      <td>{{ request_protocol }}://{{ request.META.HTTP_HOST }}{{ request.path|escape }}</td>
     </tr>
     <tr>
       <th>Exception Type:</th>
@@ -309,7 +309,7 @@
     </tr>
     <tr>
       <th>Exception Value:</th>
-      <td>{{ exception_value }}</td>
+      <td>{{ exception_value|escape }}</td>
     </tr>
     <tr>
       <th>Exception Location:</th>
@@ -412,7 +412,7 @@
     &nbsp;&nbsp;{{ frame.lineno }}. {{ frame.context_line|escape }}<br/>
   {% endif %}
 {% endfor %}<br/>
-&nbsp;&nbsp;{{ exception_type }} at {{ request.path }}<br/>
+&nbsp;&nbsp;{{ exception_type }} at {{ request.path|escape }}<br/>
 &nbsp;&nbsp;{{ exception_value|escape }}</code>
           </td>
         </tr>
@@ -545,7 +545,7 @@
 <html lang="en">
 <head>
   <meta http-equiv="content-type" content="text/html; charset=utf-8" />
-  <title>Page not found at {{ request.path }}</title>
+  <title>Page not found at {{ request.path|escape }}</title>
   <meta name="robots" content="NONE,NOARCHIVE" />
   <style type="text/css">
     html * { padding:0; margin:0; }
@@ -575,7 +575,7 @@
       </tr>
       <tr>
         <th>Request URL:</th>
-      <td>{{ request_protocol }}://{{ request.META.HTTP_HOST }}{{ request.path }}</td>
+      <td>{{ request_protocol }}://{{ request.META.HTTP_HOST }}{{ request.path|escape }}</td>
       </tr>
     </table>
   </div>
@@ -590,7 +590,7 @@
           <li>{{ pattern|escape }}</li>
         {% endfor %}
       </ol>
-      <p>The current URL, <code>{{ request.path }}</code>, didn't match any of these.</p>
+      <p>The current URL, <code>{{ request.path|escape }}</code>, didn't match any of these.</p>
     {% else %}
       <p>{{ reason|escape }}</p>
     {% endif %}

Modified: cs/pythia/trunk/opal/views/defaults.py
===================================================================
--- cs/pythia/trunk/opal/views/defaults.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/views/defaults.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -75,7 +75,7 @@
         request_path
             The path of the requested URL (e.g., '/app/pages/bad_page/')
     """
-    t = loader.get_template(template_name)
+    t = loader.get_template(template_name) # You need to create a 404.html template.
     return http.HttpResponseNotFound(t.render(RequestContext(request, {'request_path': request.path})))
 
 def server_error(request, template_name='500.html'):
@@ -85,5 +85,5 @@
     Templates: `500.html`
     Context: None
     """
-    t = loader.get_template(template_name)
+    t = loader.get_template(template_name) # You need to create a 500.html template.
     return http.HttpResponseServerError(t.render(Context({})))

Modified: cs/pythia/trunk/opal/views/generic/date_based.py
===================================================================
--- cs/pythia/trunk/opal/views/generic/date_based.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/views/generic/date_based.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -1,6 +1,7 @@
 from opal.template import loader, RequestContext
 from opal.core.exceptions import ObjectDoesNotExist
 from opal.core.xheaders import populate_xheaders
+from opal.db.models.fields import DateTimeField
 from opal.http import Http404, HttpResponse
 import datetime, time
 
@@ -235,9 +236,10 @@
     model = queryset.model
     now = datetime.datetime.now()
 
-    lookup_kwargs = {
-        '%s__range' % date_field: (datetime.datetime.combine(date, datetime.time.min), datetime.datetime.combine(date, datetime.time.max)),
-    }
+    if isinstance(model._meta.get_field(date_field), DateTimeField):
+        lookup_kwargs = {'%s__range' % date_field: (datetime.datetime.combine(date, datetime.time.min), datetime.datetime.combine(date, datetime.time.max))}
+    else:
+        lookup_kwargs = {date_field: date}
 
     # Only bother to check current date if the date isn't in the past and future objects aren't requested.
     if date >= now.date() and not allow_future:
@@ -304,9 +306,10 @@
     model = queryset.model
     now = datetime.datetime.now()
 
-    lookup_kwargs = {
-        '%s__range' % date_field: (datetime.datetime.combine(date, datetime.time.min), datetime.datetime.combine(date, datetime.time.max)),
-    }
+    if isinstance(model._meta.get_field(date_field), DateTimeField):
+        lookup_kwargs = {'%s__range' % date_field: (datetime.datetime.combine(date, datetime.time.min), datetime.datetime.combine(date, datetime.time.max))}
+    else:
+        lookup_kwargs = {date_field: date}
 
     # Only bother to check current date if the date isn't in the past and future objects aren't requested.
     if date >= now.date() and not allow_future:

Modified: cs/pythia/trunk/opal/views/generic/simple.py
===================================================================
--- cs/pythia/trunk/opal/views/generic/simple.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/views/generic/simple.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -2,12 +2,18 @@
 from opal.template import RequestContext
 from opal.http import HttpResponse, HttpResponsePermanentRedirect, HttpResponseGone
 
-def direct_to_template(request, template, **kwargs):
+def direct_to_template(request, template, extra_context={}, **kwargs):
     """
     Render a given template with any extra URL parameters in the context as
     ``{{ params }}``.
     """
-    return render_to_response(template, {'params' : kwargs}, context_instance=RequestContext(request))
+    dictionary = {'params': kwargs}
+    for key, value in extra_context.items():
+        if callable(value):
+            dictionary[key] = value()
+        else:
+            dictionary[key] = value
+    return render_to_response(template, dictionary, context_instance=RequestContext(request))
 
 def redirect_to(request, url, **kwargs):
     """
@@ -18,7 +24,7 @@
     ``/foo/<id>/`` to ``/bar/<id>/``, you could use the following URLconf::
 
         urlpatterns = patterns('',
-            ('^foo/(?p<id>\d+)/$', 'opal.views.generic.simple.redirect_to', {'url' : '/bar/%(id)s/'}),
+            ('^foo/(?P<id>\d+)/$', 'opal.views.generic.simple.redirect_to', {'url' : '/bar/%(id)s/'}),
         )
 
     If the given url is ``None``, a HttpResponseGone (410) will be issued.

Modified: cs/pythia/trunk/opal/views/i18n.py
===================================================================
--- cs/pythia/trunk/opal/views/i18n.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/views/i18n.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -9,16 +9,16 @@
     """
     Redirect to a given url while setting the chosen language in the
     session or cookie. The url and the language code need to be
-    specified in the GET paramters.
+    specified in the GET parameters.
     """
-    lang_code = request.GET['language']
+    lang_code = request.GET.get('language', None)
     next = request.GET.get('next', None)
     if not next:
         next = request.META.get('HTTP_REFERER', None)
     if not next:
         next = '/'
     response = http.HttpResponseRedirect(next)
-    if check_for_language(lang_code):
+    if lang_code and check_for_language(lang_code):
         if hasattr(request, 'session'):
             request.session['opal_language'] = lang_code
         else:

Modified: cs/pythia/trunk/opal/views/static.py
===================================================================
--- cs/pythia/trunk/opal/views/static.py	2007-07-23 19:50:29 UTC (rev 7736)
+++ cs/pythia/trunk/opal/views/static.py	2007-07-24 00:33:05 UTC (rev 7737)
@@ -81,7 +81,7 @@
     try:
         t = loader.get_template('static/directory_index')
     except TemplateDoesNotExist:
-        t = Template(DEFAULT_DIRECTORY_INDEX_TEMPLATE)
+        t = Template(DEFAULT_DIRECTORY_INDEX_TEMPLATE, name='Default directory index template')
     files = []
     for f in os.listdir(fullpath):
         if not f.startswith('.'):



More information about the cig-commits mailing list