diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 00000000..5a23989a
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1 @@
+* @dlo @RossRogers
diff --git a/.gitignore b/.gitignore
index d05253fa..1780470f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,3 +3,4 @@
build/
dist/
django_pyodbc.egg-info/
+.eggs/
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 00000000..08cb51b7
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,54 @@
+The Contributors License Agreement
+==========================================
+
+Version 2.0
+-----------
+
+### Preface
+
+This Agreement is between You, the Users, the Contributors, and Lionheart Software LLC ("Lionheart"). The intent of the Agreement is to clarify your responsibilities as a Contributor.
+
+The Agreement is meant to protect You, the Users, the Contributors, and Lionheart from the malicious misdeeds of others. The Agreement is *not* intended to restrict how You use Your own Contributions.
+
+The Agreement is written in plain English to be easily understood by You and all those affected by it. The terms defined below and used throughout this document are capitalized in order to reduce ambiguity, but they should be understood to have their usual colloquial meaning and the singular shall include the plural.
+
+### Definitions
+
+1. "Agreement" — This document.
+
+2. "You" / "Your" — The individual signing this Agreement.
+
+3. "Lionheart" — The company which manages the Projects. The company's website is at .
+
+4. "Project" — Open source projects being managed by Lionheart. The Projects may all be found at .
+
+5. "Contribution" — Code, content, and ideas that are meant to be integrated into Projects. Issues (bug report) and pull requests (suggestions to integrate content) filed on a Project are implicitly Contributions, unless they clearly state up-front, "This is not a contribution." Contributions may take other forms as well, including (but not limited to) email or other more general communication intended to be received by one or more Contributors.
+
+6. "Contributor" — Those people and corporations that provide Contributions.
+
+7. "User" — Those people and companies which make use of the Projects. Making use of a Project includes (but is not limited to) compiling and/or running its content, taking some action which causes its content to be used, copying its content, distributing its content, modifying its content, and even taking its content and/or ideas for a less direct purpose.
+
+
+### Licensing Agreement
+
+When You provide a Contribution to a Project:
+
+* You assert that the Contribution was authored by You.
+
+* You license that Contribution under the terms of the license of that Project. More specifically, You grant an irrevocable license for Your Contribution to the Users, the Contributors, and Lionheart.
+
+* You assert that You have the authority and are able to license Your Contribution under the license of that Project.
+
+* If Your employer has rights to intellectual property that You create which may include Your Contribution, then You assert that Your employer has granted You explicit permission to license the Contribution on behalf of Your employer.
+
+Furthermore:
+
+* You assert that You have the authority and are able to enter into this Agreement.
+
+* If You become aware of a problem with any of Your Contributions or with the fact that You have signed this Agreement, then You agree to inform Lionheart about the problem, without any undue delay.
+
+To the extent possible under law, Lionheart has dedicated all copyright and related and neighboring rights to the text of this license to the public domain worldwide. This software is distributed without any warranty. See .
+
+---
+
+This CLA was copied with modifications from https://github.com/Medium/opensource/blob/master/cla-2.0.md.
diff --git a/LICENSE b/LICENSE
index 317b93d5..5c4d0420 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,19 +1,185 @@
-Copyright 2013-2014 Lionheart Software LLC
-Licensed under the Apache License, Version 2.0 (the "License"); you may not use
-this file except in compliance with the License. You may obtain a copy of the
-License at
- http://www.apache.org/licenses/LICENSE-2.0
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
-Unless required by applicable law or agreed to in writing, software distributed
-under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
-CONDITIONS OF ANY KIND, either express or implied. See the License for the
-specific language governing permissions and limitations under the License.
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
----
-Copyright (c) 2008,2009 django-pyodbc developers (see README.txt).
+Copyright (c) 2008,2009 django-sql-server developers (see original README.rst).
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
diff --git a/Makefile b/Makefile
index 99bb7f5c..e95553ae 100644
--- a/Makefile
+++ b/Makefile
@@ -1,2 +1,26 @@
-publish:
- python setup.py sdist upload --sign
+# Copyright 2015-2018 Lionheart Software LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+all: clean test publish
+
+clean:
+ rm -rf dist/
+
+test:
+ python setup.py test
+
+publish: clean
+ python3 setup.py bdist_wheel --universal
+ gpg --detach-sign -a dist/*.whl
+ twine upload dist/*
diff --git a/README.rst b/README.rst
index e46cdb99..cf34620e 100644
--- a/README.rst
+++ b/README.rst
@@ -1,13 +1,10 @@
django-pyodbc
=============
-|version|_ |downloads|_
-
-.. |downloads| image:: http://img.shields.io/pypi/dm/django-pyodbc.svg?style=flat
-.. _downloads: https://pypi.python.org/pypi/django-pyodbc
+|version|_
.. |version| image:: http://img.shields.io/pypi/v/django-pyodbc.svg?style=flat
-.. _version: https://pypi.python.org/pypi/django-pyodbc
+.. _version: https://pypi.org/project/django-pyodbc
django-pyodbc is a `Django `_ SQL Server DB backend powered by the `pyodbc `_ library. pyodbc is a mature, viable way to access SQL Server from Python in multiple platforms and is actively maintained. It's also used by SQLAlchemy for SQL Server connections.
@@ -15,19 +12,24 @@ This is a fork of the original `django-pyodbc =2.0.0a1``
+* [x] Alpha support for Django 3.1 via ``pip install django-pyodbc>=2.0.0a2``
+* [x] Support for Django 1.4-1.10.
+* [x] Support for SQL Server 2000, 2005, 2008, and 2012 (please let us know if you have success running this backend with another version of SQL Server)
+* [x] Support for Openedge 11.6
+* [x] Support for `IBM's DB2 `_
+* [x] Native Unicode support. Every string that goes in is stored as Unicode, and every string that goes out of the database is returned as Unicode. No conversion to/from intermediate encodings takes place, so things like max_length in CharField works just like expected.
+* [x] Both Windows Authentication (Integrated Security) and SQL Server Authentication.
+* [x] LIMIT+OFFSET and offset w/o LIMIT emulation under SQL Server 2005.
+* [x] LIMIT+OFFSET under SQL Server 2000.
+* [x] Django's TextField both under SQL Server 2000 and 2005.
+* [x] Passes most of the tests of the Django test suite.
+* [x] Compatible with SQL Server and SQL Server Native Client from Microsoft (Windows) and FreeTDS ODBC drivers (Linux).
+
+TODO
--------
-* Does not support Python3 -- see issue #47 https://github.com/lionheart/django-pyodbc/issues/47
+* [ ] Python 3 support. See `#47 `_ for details.
Installation
------------
@@ -37,7 +39,7 @@ Installation
.. code:: python
pip install django-pyodbc
-
+
2. Now you can now add a database to your settings using standard ODBC parameters.
.. code:: python
@@ -55,7 +57,22 @@ Installation
}
}
-3. That's it! You're done.
+3. That's it! You're done.*
+
+ \* *You may need to configure your machine and drivers to do an*
+ `ODBC `_
+ *connection to your database server, if you haven't already. For Linux this
+ involves installing and*
+ `configuring Unix ODBC and FreeTDS `_ .
+ *Iterate on the command line to test your*
+ `pyodbc `_ *connection like:*
+
+ .. code:: python
+
+ python -c 'import pyodbc; print(pyodbc.connect("DSN=foobar_mssql_data_source_name;UID=foo;PWD=bar").cursor().execute("select 1"))'
+
+ *extended instructions* `here `_
+
Configuration
-------------
@@ -69,6 +86,8 @@ Standard Django settings
``HOST`` String. SQL Server instance in ``server\instance`` or ``ip,port`` format.
+``PORT`` String. SQL Server port.
+
``USER`` String. Database user name. If not given then MS Integrated Security
will be used.
@@ -125,6 +144,19 @@ Standard Django settings
Boolean. This will restrict the table list query to the dbo schema.
+* ``openedge``
+
+ Boolean. This will trigger support for Progress Openedge
+
+* ``left_sql_quote`` , ``right_sql_quote``
+
+ String. Specifies the string to be inserted for left and right quoting of SQL identifiers respectively. Only set these if django-pyodbc isn't guessing the correct quoting for your system.
+
+
+OpenEdge Support
+~~~~~~~~~~~~~~~~~~~~~~~~
+For OpenEdge support, make sure you supply both the driver and the openedge extra options; all other parameters should work the same.
+
Tests
-----
@@ -138,21 +170,35 @@ To run the test suite:
License
-------
-See `LICENSE `_.
+This project originally started life as django-sql-server. This project was
+abandoned in 2011 and was brought back to life as django-pyodbc by our team in
+2013. In the process, most of the project was refactored and brought up to speed
+with modern Django best practices. The work done prior to the 2013 rewrite is
+licensed under BSD (3-Clause). Improvements since then are licensed under Apache
+2.0. See `LICENSE `_ for more details.
+
+
+SemVer
+------
+
+This project implements `Semantic Versioning `_ .
+
Credits
-------
+* `Aaron Aichlmayr `_
* `Adam Vandenber `_
* `Alex Vidal `_
* `Dan Loewenherz `_
* `Filip Wasilewski `_
+* `mamcx `_ "For the first implementation using pymssql."
* `Michael Manfre `_
* `Michiya Takahashi `_
* `Paul Tax `_
* `Ramiro Morales `_
+* `Ross Rogers `_
* `Wei guangjing `_
-* `mamcx `_
From the original project README.
diff --git a/bump_version.sh b/bump_version.sh
new file mode 100755
index 00000000..a95d357c
--- /dev/null
+++ b/bump_version.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+if [ "$1" != "" ]; then
+ sed -i "" "s/\(__version__[ ]*=\).*/\1 \"$1\"/g" django_pyodbc/metadata.py
+ git add .
+ git commit -m "bump version to $1"
+ git tag $1
+ git push origin master
+ git push --tags
+ make
+fi
diff --git a/django_pyodbc/aggregates.py b/django_pyodbc/aggregates.py
index f1001ff0..29625a3c 100644
--- a/django_pyodbc/aggregates.py
+++ b/django_pyodbc/aggregates.py
@@ -1,3 +1,45 @@
+# Copyright 2013-2017 Lionheart Software LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Copyright (c) 2008, django-pyodbc developers (see README.rst).
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of django-sql-server nor the names of its contributors
+# may be used to endorse or promote products derived from this software
+# without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
from django.db.models.sql.aggregates import Aggregate
class _Aggregate(Aggregate):
diff --git a/django_pyodbc/base.py b/django_pyodbc/base.py
index f467417d..bae81391 100644
--- a/django_pyodbc/base.py
+++ b/django_pyodbc/base.py
@@ -1,3 +1,45 @@
+# Copyright 2013-2017 Lionheart Software LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Copyright (c) 2008, django-pyodbc developers (see README.rst).
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of django-sql-server nor the names of its contributors
+# may be used to endorse or promote products derived from this software
+# without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
"""
MS SQL Server database backend for Django.
"""
@@ -7,7 +49,17 @@
import sys
import warnings
+from django import VERSION as DjangoVersion
+from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
+from django.db import utils
+from django.db.backends.signals import connection_created
+
+from django_pyodbc.client import DatabaseClient
+from django_pyodbc.compat import binary_type, text_type, timezone
+from django_pyodbc.creation import DatabaseCreation
+from django_pyodbc.introspection import DatabaseIntrospection
+from django_pyodbc.operations import DatabaseOperations
try:
import pyodbc as Database
@@ -22,7 +74,6 @@
if pyodbc_ver < (2, 0, 38, 9999):
raise ImproperlyConfigured("pyodbc 2.0.38 or newer is required; you have %s" % Database.version)
-from django.db import utils
try:
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.base.features import BaseDatabaseFeatures
@@ -30,32 +81,20 @@
except ImportError:
# import location prior to Django 1.8
from django.db.backends import BaseDatabaseWrapper, BaseDatabaseFeatures, BaseDatabaseValidation
-from django.db.backends.signals import connection_created
-
-from django.conf import settings
-from django import VERSION as DjangoVersion
-if DjangoVersion[:2] == (1, 8):
- _DJANGO_VERSION = 18
-elif DjangoVersion[:2] == (1, 7):
- _DJANGO_VERSION = 17
-elif DjangoVersion[:2] == (1, 6):
- _DJANGO_VERSION = 16
-elif DjangoVersion[:2] == (1, 5):
- _DJANGO_VERSION = 15
-elif DjangoVersion[:2] == (1, 4):
- _DJANGO_VERSION = 14
-elif DjangoVersion[:2] == (1, 3):
- _DJANGO_VERSION = 13
-elif DjangoVersion[:2] == (1, 2):
- _DJANGO_VERSION = 12
+
+if DjangoVersion[:2] == (2, 0):
+ _DJANGO_VERSION = 20
+elif DjangoVersion[:2] == (3, 1):
+ _DJANGO_VERSION = 31
else:
- raise ImproperlyConfigured("Django %d.%d is not supported." % DjangoVersion[:2])
+ if DjangoVersion[0] == 1:
+ raise ImproperlyConfigured("Django %d.%d " % DjangoVersion[:2] +
+ "is not supported on 2.+ versions of django-pyodbc. Please look " +
+ "into the 1.x versions of django-pyodbc to see if your 1.x " +
+ "version of Django is supported by django-pyodbc")
+ else:
+ raise ImproperlyConfigured("Django %d.%d is not supported." % DjangoVersion[:2])
-from django_pyodbc.operations import DatabaseOperations
-from django_pyodbc.client import DatabaseClient
-from django_pyodbc.compat import binary_type, text_type, timezone
-from django_pyodbc.creation import DatabaseCreation
-from django_pyodbc.introspection import DatabaseIntrospection
DatabaseError = Database.Error
IntegrityError = Database.IntegrityError
@@ -71,7 +110,7 @@ class DatabaseFeatures(BaseDatabaseFeatures):
allow_sliced_subqueries = False
supports_paramstyle_pyformat = False
- #has_bulk_insert = False
+ has_bulk_insert = False
# DateTimeField doesn't support timezones, only DateTimeOffsetField
supports_timezones = False
supports_sequence_reset = False
@@ -128,6 +167,17 @@ class DatabaseWrapper(BaseDatabaseWrapper):
# TODO: freetext, full-text contains...
}
+ # In Django 1.8 data_types was moved from DatabaseCreation to DatabaseWrapper.
+ # See https://docs.djangoproject.com/en/1.10/releases/1.8/#database-backend-api
+ data_types = DatabaseCreation.data_types
+ features_class = DatabaseFeatures
+ ops_class = DatabaseOperations
+ client_class = DatabaseClient
+ creation_class = DatabaseCreation
+ introspection_class = DatabaseIntrospection
+ validation_class = BaseDatabaseValidation
+
+
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
@@ -155,17 +205,14 @@ def __init__(self, *args, **kwargs):
self.test_create = self.settings_dict.get('TEST_CREATE', True)
- if _DJANGO_VERSION >= 13:
- self.features = DatabaseFeatures(self)
- else:
- self.features = DatabaseFeatures()
+ self.features = DatabaseFeatures(self)
self.ops = DatabaseOperations(self)
self.client = DatabaseClient(self)
self.creation = DatabaseCreation(self)
self.introspection = DatabaseIntrospection(self)
self.validation = BaseDatabaseValidation(self)
self.connection = None
-
+
def get_connection_params(self):
settings_dict = self.settings_dict
@@ -203,8 +250,15 @@ def _get_connection_string(self):
settings_dict = self.settings_dict
db_str, user_str, passwd_str, port_str = None, None, "", None
options = settings_dict['OPTIONS']
+ test = settings_dict['TEST']
+ try:
+ test_name = test.get('NAME')
+ except AttributeError:
+ test_name = None
if settings_dict['NAME']:
db_str = settings_dict['NAME']
+ elif test_name:
+ db_str = test_name
if settings_dict['HOST']:
host_str = settings_dict['HOST']
else:
@@ -254,6 +308,10 @@ def _get_connection_string(self):
if port_str:
host_str += ';PORT=%s' % port_str
cstr_parts.append('SERVER=%s' % host_str)
+ elif self.ops.is_openedge:
+ if port_str:
+ host_str += ';PortNumber=%s' % port_str
+ cstr_parts.append('HostName=%s' % host_str)
else:
cstr_parts.append('SERVERNAME=%s' % host_str)
@@ -286,11 +344,11 @@ def _cursor(self):
options = settings_dict['OPTIONS']
autocommit = options.get('autocommit', False)
if self.unicode_results:
- self.connection = Database.connect(connstr, \
- autocommit=autocommit, \
+ self.connection = Database.connect(connstr,
+ autocommit=autocommit,
unicode_results='True')
else:
- self.connection = Database.connect(connstr, \
+ self.connection = Database.connect(connstr,
autocommit=autocommit)
connection_created.send(sender=self.__class__, connection=self)
@@ -301,12 +359,13 @@ def _cursor(self):
# Django convention for the 'week_day' Django lookup) if the user
# hasn't told us otherwise
- if not self.ops.is_db2:
+ if not self.ops.is_db2 and not self.ops.is_openedge:
# IBM's DB2 doesn't support this syntax and a suitable
# equivalent could not be found.
cursor.execute("SET DATEFORMAT ymd; SET DATEFIRST %s" % self.datefirst)
if self.ops.sql_server_ver < 2005:
self.creation.data_types['TextField'] = 'ntext'
+ self.data_types['TextField'] = 'ntext'
self.features.can_return_id_from_insert = False
ms_sqlncli = re.compile('^((LIB)?SQLN?CLI|LIBMSODBCSQL)')
@@ -342,7 +401,7 @@ def _cursor(self):
self.driver_supports_utf8 = (self.drv_name == 'SQLSRV32.DLL'
or ms_sqlncli.match(self.drv_name))
- return CursorWrapper(cursor, self.driver_supports_utf8, self.encoding)
+ return CursorWrapper(cursor, self.driver_supports_utf8, self.encoding, self)
def _execute_foreach(self, sql, table_names=None):
cursor = self.cursor()
@@ -371,29 +430,26 @@ class CursorWrapper(object):
A wrapper around the pyodbc's cursor that takes in account a) some pyodbc
DB-API 2.0 implementation and b) some common ODBC driver particularities.
"""
- def __init__(self, cursor, driver_supports_utf8, encoding=""):
+ def __init__(self, cursor, driver_supports_utf8, encoding="", db_wrpr=None):
self.cursor = cursor
self.driver_supports_utf8 = driver_supports_utf8
self.last_sql = ''
self.last_params = ()
self.encoding = encoding
+ self.db_wrpr = db_wrpr
def close(self):
try:
self.cursor.close()
- except Database.ProgrammingError:
+ except:
pass
def format_sql(self, sql, n_params=None):
- if not self.driver_supports_utf8 and isinstance(sql, text_type):
- # Older FreeTDS (and other ODBC drivers?) don't support Unicode yet, so
- # we need to encode the SQL clause itself in utf-8
- sql = sql.encode('utf-8')
# pyodbc uses '?' instead of '%s' as parameter placeholder.
if n_params is not None:
try:
sql = sql % tuple('?' * n_params)
- except:
+ except Exception as e:
#Todo checkout whats happening here
pass
else:
@@ -405,15 +461,10 @@ def format_params(self, params):
fp = []
for p in params:
if isinstance(p, text_type):
- if not self.driver_supports_utf8:
- # Older FreeTDS (and other ODBC drivers?) doesn't support Unicode
- # yet, so we need to encode parameters in utf-8
- fp.append(p.encode('utf-8'))
- else:
- fp.append(p)
+ fp.append(p)
elif isinstance(p, binary_type):
if not self.driver_supports_utf8:
- fp.append(p.decode(self.encoding).encode('utf-8'))
+ fp.append(p.decode(self.encoding))
else:
fp.append(p)
elif isinstance(p, type(True)):
@@ -427,6 +478,9 @@ def format_params(self, params):
def execute(self, sql, params=()):
self.last_sql = sql
+ #django-debug toolbar error
+ if params == None:
+ params = ()
sql = self.format_sql(sql, len(params))
params = self.format_params(params)
self.last_params = params
@@ -498,6 +552,11 @@ def __getattr__(self, attr):
def __iter__(self):
return iter(self.cursor)
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+ return False
# # MS SQL Server doesn't support explicit savepoint commits; savepoints are
# # implicitly committed with the transaction.
diff --git a/django_pyodbc/client.py b/django_pyodbc/client.py
old mode 100755
new mode 100644
index 54299dfd..be530d5c
--- a/django_pyodbc/client.py
+++ b/django_pyodbc/client.py
@@ -1,3 +1,45 @@
+# Copyright 2013-2017 Lionheart Software LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Copyright (c) 2008, django-pyodbc developers (see README.rst).
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of django-sql-server nor the names of its contributors
+# may be used to endorse or promote products derived from this software
+# without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
try:
from django.db.backends.base.client import BaseDatabaseClient
except ImportError:
diff --git a/django_pyodbc/compat.py b/django_pyodbc/compat.py
index e0ef8a17..1415f549 100644
--- a/django_pyodbc/compat.py
+++ b/django_pyodbc/compat.py
@@ -1,3 +1,45 @@
+# Copyright 2013-2017 Lionheart Software LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Copyright (c) 2008, django-pyodbc developers (see README.rst).
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of django-sql-server nor the names of its contributors
+# may be used to endorse or promote products derived from this software
+# without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
import sys
# native modules to substititute legacy Django modules
@@ -29,13 +71,13 @@
# new modules from Django1.5
try:
- from django.utils.six import PY3
+ from six import PY3
_py3 = PY3
except ImportError:
_py3 = False
try:
- from django.utils.six import b, binary_type, string_types, text_type
+ from six import b, binary_type, string_types, text_type
except ImportError:
b = lambda s: s
binary_type = str
diff --git a/django_pyodbc/compiler.py b/django_pyodbc/compiler.py
index 1d18aeaf..6b6eba82 100644
--- a/django_pyodbc/compiler.py
+++ b/django_pyodbc/compiler.py
@@ -1,8 +1,52 @@
+# Copyright 2013-2017 Lionheart Software LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Copyright (c) 2008, django-pyodbc developers (see README.rst).
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of django-sql-server nor the names of its contributors
+# may be used to endorse or promote products derived from this software
+# without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
import re
-from django.db.models.sql import compiler
+import types
+from datetime import date, datetime
+
import django
-from datetime import datetime
from django import VERSION as DjangoVersion
+from django.db.models.sql import compiler, where
from django_pyodbc.compat import zip_longest
@@ -61,7 +105,7 @@ def _remove_order_limit_offset(sql):
return _re_order_limit_offset.sub('',sql).split(None, 1)[1]
def _break(s, find):
- """Break a string s into the part before the substring to find,
+ """Break a string s into the part before the substring to find,
and the part including and after the substring."""
i = s.find(find)
return s[:i], s[i:]
@@ -69,10 +113,19 @@ def _break(s, find):
def _get_order_limit_offset(sql):
return _re_order_limit_offset.search(sql).groups()
+def where_date(self, compiler, connection):
+ query, data = self.as_sql(compiler, connection)
+ if len(data) != 1:
+ raise Error('Multiple data items in date condition') # I don't think this can happen but I'm adding an exception just in case
+ if type(self.rhs) == date:
+ return [query, [self.rhs]]
+ elif type(self.lhs) == date:
+ return [query, [self.lhs]]
+
class SQLCompiler(compiler.SQLCompiler):
def __init__(self,*args,**kwargs):
super(SQLCompiler,self).__init__(*args,**kwargs)
- # Pattern to find the quoted column name at the end of a field
+ # Pattern to find the quoted column name at the end of a field
# specification
#
# E.g., if you're talking to MS SQL this regex would become
@@ -86,9 +139,20 @@ def __init__(self,*args,**kwargs):
left_sql_quote=self.connection.ops.left_sql_quote,
right_sql_quote=self.connection.ops.right_sql_quote))
+ def compile(self, node, select_format=False):
+ if self.connection.ops.is_openedge and type(node) is where.WhereNode:
+ for val in node.children:
# If we get too many more of these special cases we should probably move them to another file
+ if type(val.rhs) == date or type(val.lhs) == date:
+ setattr(val, 'as_microsoft', types.MethodType(where_date, val))
+
+ args = [node]
+ if select_format:
+ args.append(select_format)
+ return super(SQLCompiler, self).compile(*args)
def resolve_columns(self, row, fields=()):
- # If the results are sliced, the resultset will have an initial
+ # If the results are sliced, the resultset will have an initial
# "row number" column. Remove this column before the ORM sees it.
if getattr(self, '_using_row_number', False):
row = row[1:]
@@ -109,53 +173,64 @@ def _fix_aggregates(self):
MSSQL doesn't match the behavior of the other backends on a few of
the aggregate functions; different return type behavior, different
function names, etc.
-
+
MSSQL's implementation of AVG maintains datatype without proding. To
match behavior of other django backends, it needs to not drop remainders.
E.g. AVG([1, 2]) needs to yield 1.5, not 1
"""
- for alias, aggregate in self.query.aggregate_select.items():
+ try:
+        # for Django 1.10 and up (reportedly works starting in Django 1.8)
+ select = self.query.annotation_select
+ except AttributeError:
+ # older
+ select = self.query.aggregate_select
+
+ for alias, aggregate in select.items():
+ if not hasattr(aggregate, 'sql_function'):
+ continue
if aggregate.sql_function == 'AVG':# and self.connection.cast_avg_to_float:
# Embed the CAST in the template on this query to
# maintain multi-db support.
- self.query.aggregate_select[alias].sql_template = \
+ select[alias].sql_template = \
'%(function)s(CAST(%(field)s AS FLOAT))'
# translate StdDev function names
elif aggregate.sql_function == 'STDDEV_SAMP':
- self.query.aggregate_select[alias].sql_function = 'STDEV'
+ select[alias].sql_function = 'STDEV'
elif aggregate.sql_function == 'STDDEV_POP':
- self.query.aggregate_select[alias].sql_function = 'STDEVP'
+ select[alias].sql_function = 'STDEVP'
# translate Variance function names
elif aggregate.sql_function == 'VAR_SAMP':
- self.query.aggregate_select[alias].sql_function = 'VAR'
+ select[alias].sql_function = 'VAR'
elif aggregate.sql_function == 'VAR_POP':
- self.query.aggregate_select[alias].sql_function = 'VARP'
+ select[alias].sql_function = 'VARP'
+
+ def as_sql(self, with_limits=True, with_col_aliases=False, qn=None, **kwargs):
+ self.pre_sql_setup()
- def as_sql(self, with_limits=True, with_col_aliases=False):
# Django #12192 - Don't execute any DB query when QS slicing results in limit 0
if with_limits and self.query.low_mark == self.query.high_mark:
return '', ()
-
+
self._fix_aggregates()
-
+
self._using_row_number = False
-
+
# Get out of the way if we're not a select query or there's no limiting involved.
check_limits = with_limits and (self.query.low_mark or self.query.high_mark is not None)
if not check_limits:
- # The ORDER BY clause is invalid in views, inline functions,
- # derived tables, subqueries, and common table expressions,
+ # The ORDER BY clause is invalid in views, inline functions,
+ # derived tables, subqueries, and common table expressions,
# unless TOP or FOR XML is also specified.
try:
setattr(self.query, '_mssql_ordering_not_allowed', with_col_aliases)
- result = super(SQLCompiler, self).as_sql(with_limits, with_col_aliases)
+ result = super(SQLCompiler, self).as_sql(with_limits, with_col_aliases, **kwargs)
finally:
# remove in case query is every reused
delattr(self.query, '_mssql_ordering_not_allowed')
return result
- raw_sql, fields = super(SQLCompiler, self).as_sql(False, with_col_aliases)
-
+ raw_sql, fields = super(SQLCompiler, self).as_sql(False, with_col_aliases, **kwargs)
+
# Check for high mark only and replace with "TOP"
if self.query.high_mark is not None and not self.query.low_mark:
if self.connection.ops.is_db2:
@@ -166,7 +241,7 @@ def as_sql(self, with_limits=True, with_col_aliases=False):
_select += ' DISTINCT'
sql = re.sub(r'(?i)^{0}'.format(_select), '{0} TOP {1}'.format(_select, self.query.high_mark), raw_sql, 1)
return sql, fields
-
+
# Else we have limits; rewrite the query using ROW_NUMBER()
self._using_row_number = True
@@ -180,7 +255,7 @@ def as_sql(self, with_limits=True, with_col_aliases=False):
inner_table_name = qn('AAAA')
outer_fields, inner_select, order = self._fix_slicing_order(outer_fields, inner_select, order, inner_table_name)
-
+
# map a copy of outer_fields for injected subselect
f = []
for x in outer_fields.split(','):
@@ -199,13 +274,13 @@ def as_sql(self, with_limits=True, with_col_aliases=False):
inner=inner_select,
inner_as=inner_table_name,
)
-
+
# IBM's DB2 cannot have a prefix of `_` for column names
row_num_col = 'django_pyodbc_row_num' if self.connection.ops.is_db2 else '_row_num'
where_row_num = '{0} < {row_num_col}'.format(self.query.low_mark, row_num_col=row_num_col)
if self.query.high_mark:
where_row_num += ' and {row_num_col} <= {0}'.format(self.query.high_mark, row_num_col=row_num_col)
-
+
# SQL Server 2000 doesn't support the `ROW_NUMBER()` function, thus it
# is necessary to use the `TOP` construct with `ORDER BY` so we can
# slice out a particular range of results.
@@ -215,7 +290,7 @@ def as_sql(self, with_limits=True, with_col_aliases=False):
order_by_col = order_by_col_with_prefix.rsplit('.',1)[-1]
opposite_order_direction = REV_ODIR[order_direction]
sql = r'''
- SELECT
+ SELECT
1, -- placeholder for _row_num
* FROM
(
@@ -225,7 +300,7 @@ def as_sql(self, with_limits=True, with_col_aliases=False):
*
FROM
(
- SELECT TOP
+ SELECT TOP
-- high_mark
{high_mark}
-- inner
@@ -233,7 +308,7 @@ def as_sql(self, with_limits=True, with_col_aliases=False):
ORDER BY (
-- order_by_col
{left_sql_quote}AAAA{right_sql_quote}.{order_by_col}
- )
+ )
-- order_direction
{order_direction}
) AS BBBB ORDER BY ({left_sql_quote}BBBB{right_sql_quote}.{order_by_col}) {opposite_order_direction}
@@ -249,17 +324,17 @@ def as_sql(self, with_limits=True, with_col_aliases=False):
right_sql_quote=self.connection.ops.right_sql_quote,
)
else:
- sql = "SELECT {row_num_col}, {outer} FROM ( SELECT ROW_NUMBER() OVER ( ORDER BY {order}) as {row_num_col}, {inner}) as QQQ where {where}".format(
+ sql = "SELECT {outer}, {row_num_col} FROM ( SELECT ROW_NUMBER() OVER ( ORDER BY {order}) as {row_num_col}, {inner}) as QQQ where {where}".format(
outer=outer_fields,
order=order,
inner=inner_select,
where=where_row_num,
row_num_col=row_num_col
)
-
-
+
+
return sql, fields
-
+
def _select_top(self,select,inner_sql,number_to_fetch):
if self.connection.ops.is_db2:
return "{select} {inner_sql} FETCH FIRST {number_to_fetch} ROWS ONLY".format(
@@ -270,15 +345,15 @@ def _select_top(self,select,inner_sql,number_to_fetch):
def _fix_slicing_order(self, outer_fields, inner_select, order, inner_table_name):
"""
- Apply any necessary fixes to the outer_fields, inner_select, and order
+ Apply any necessary fixes to the outer_fields, inner_select, and order
strings due to slicing.
"""
# Using ROW_NUMBER requires an ordering
if order is None:
- meta = self.query.get_meta()
+ meta = self.query.get_meta()
column = meta.pk.db_column or meta.pk.get_attname()
order = '{0}.{1} ASC'.format(
- inner_table_name,
+ inner_table_name,
self.connection.ops.quote_name(column),
)
else:
@@ -297,8 +372,8 @@ def _fix_slicing_order(self, outer_fields, inner_select, order, inner_table_name
# remove any namespacing or table name from the column name
col = x.rsplit('.', 1)[-1]
# Is the ordering column missing from the inner select?
- # 'inner_select' contains the full query without the leading 'SELECT '.
- # It's possible that this can get a false hit if the ordering
+ # 'inner_select' contains the full query without the leading 'SELECT '.
+ # It's possible that this can get a false hit if the ordering
# column is used in the WHERE while not being in the SELECT. It's
# not worth the complexity to properly handle that edge case.
if x not in inner_select:
@@ -320,11 +395,11 @@ def _fix_slicing_order(self, outer_fields, inner_select, order, inner_table_name
def _alias_columns(self, sql):
"""Return tuple of SELECT and FROM clauses, aliasing duplicate column names."""
qn = self.connection.ops.quote_name
-
+
outer = list()
inner = list()
names_seen = list()
-
+
# replace all parens with placeholders
paren_depth, paren_buf = 0, ['']
parens, i = {}, 0
@@ -337,7 +412,7 @@ def _alias_columns(self, sql):
paren_depth -= 1
key = '_placeholder_{0}'.format(i)
buf = paren_buf.pop()
-
+
# store the expanded paren string
buf = re.sub(r'%([^\(])', r'$$$\1', buf)
parens[key] = buf% parens
@@ -369,9 +444,9 @@ def _alias_placeholders(val):
return "%(" + key + ")s"
temp_sql = re.sub("%s", _alias_placeholders, temp_sql)
-
+
select_list, from_clause = _break(temp_sql, ' FROM ' + self.connection.ops.left_sql_quote)
-
+
for col in [x.strip() for x in select_list.split(',')]:
match = self._re_pat_col.search(col)
if match:
@@ -385,17 +460,17 @@ def _alias_placeholders(val):
else:
outer.append(qn(col_name))
inner.append(_replace_sub(col))
-
+
names_seen.append(col_key)
else:
raise Exception('Unable to find a column name when parsing SQL: {0}'.format(col))
return ', '.join(outer), ', '.join(inner) + (from_clause % parens)
# ^^^^^^^^^^^^^^^^^^^^^
- # We can't use `format` here, because `format` uses `{}` as special
+ # We can't use `format` here, because `format` uses `{}` as special
# characters, but those happen to also be the quoting tokens for IBM's
# DB2
-
+
def get_ordering(self):
# The ORDER BY clause is invalid in views, inline functions,
@@ -424,7 +499,7 @@ def as_sql(self, *args, **kwargs):
if isinstance(result, list):
# Django 1.4 wraps return in list
return [self._fix_insert(x[0], x[1]) for x in result]
-
+
sql, params = result
return self._fix_insert(sql, params)
@@ -434,8 +509,8 @@ def _fix_insert(self, sql, params):
other necessary fixes.
"""
meta = self.query.get_meta()
-
- if meta.has_auto_field:
+
+ if getattr(meta, 'has_auto_field',False):
if hasattr(self.query, 'fields'):
# django 1.4 replaced columns with fields
fields = self.query.fields
@@ -444,12 +519,12 @@ def _fix_insert(self, sql, params):
# < django 1.4
fields = self.query.columns
auto_field = meta.auto_field.db_column or meta.auto_field.column
-
+
auto_in_fields = auto_field in fields
-
+
quoted_table = self.connection.ops.quote_name(meta.db_table)
if not fields or (auto_in_fields and len(fields) == 1 and not params):
- # convert format when inserting only the primary key without
+ # convert format when inserting only the primary key without
# specifying a value
sql = 'INSERT INTO {0} DEFAULT VALUES'.format(
quoted_table
@@ -467,9 +542,9 @@ def _fix_insert(self, sql, params):
if self.return_id and self.connection.features.can_return_id_from_insert:
col = self.connection.ops.quote_name(meta.pk.db_column or meta.pk.get_attname())
- # Determine datatype for use with the table variable that will return the inserted ID
+ # Determine datatype for use with the table variable that will return the inserted ID
pk_db_type = _re_data_type_terminator.split(meta.pk.db_type(self.connection))[0]
-
+
# NOCOUNT ON to prevent additional trigger/stored proc related resultsets
sql = 'SET NOCOUNT ON;{declare_table_var};{sql};{select_return_id}'.format(
sql=sql,
@@ -479,7 +554,7 @@ def _fix_insert(self, sql, params):
),
select_return_id="SELECT * FROM @sqlserver_ado_return_id",
)
-
+
output = self._values_repl.format(col=col)
sql = self._re_values_sub.sub(output, sql)
@@ -508,7 +583,7 @@ def as_sql_legacy(self):
sql = ' '.join(result)
meta = self.query.get_meta()
- if meta.has_auto_field:
+ if getattr(meta, 'has_auto_field',False):
# db_column is None if not explicitly specified by model field
auto_field_column = meta.auto_field.db_column or meta.auto_field.column
@@ -576,7 +651,7 @@ def as_sql(self):
# This section deals with specifically setting the primary key,
# or using default values if necessary
meta = self.query.get_meta()
- if meta.has_auto_field:
+ if getattr(meta, 'has_auto_field',False):
# db_column is None if not explicitly specified by model field
auto_field_column = meta.auto_field.db_column or meta.auto_field.column
out = []
@@ -603,20 +678,20 @@ class SQLUpdateCompiler(compiler.SQLUpdateCompiler, SQLCompiler):
class SQLAggregateCompiler(compiler.SQLAggregateCompiler, SQLCompiler):
def as_sql(self, qn=None):
self._fix_aggregates()
- return super(SQLAggregateCompiler, self).as_sql(qn=qn)
+ return super(SQLAggregateCompiler, self).as_sql()
# django's compiler.SQLDateCompiler was removed in 1.8
-if DjangoVersion[0] >= 1 and DjangoVersion[1] >= 8:
-
+if DjangoVersion[0] > 1 or DjangoVersion[0] == 1 and DjangoVersion[1] >= 8:
+
import warnings
-
+
class DeprecatedMeta(type):
def __new__(cls, name, bases, attrs):
# if the metaclass is defined on the current class, it's not
# a subclass so we don't want to warn.
if attrs.get('__metaclass__') is not cls:
msg = ('In the 1.8 release of django, `SQLDateCompiler` was ' +
- 'removed. This was a parent class of `' + name +
+ 'removed. This was a parent class of `' + name +
'`, and thus `' + name + '` needs to be changed.')
raise ImportError(msg)
return super(DeprecatedMeta, cls).__new__(cls, name, bases, attrs)
@@ -626,7 +701,7 @@ class SQLDateCompiler(object):
class SQLDateTimeCompiler(object):
__metaclass__ = DeprecatedMeta
-
+
else:
class SQLDateCompiler(compiler.SQLDateCompiler, SQLCompiler):
pass
diff --git a/django_pyodbc/creation.py b/django_pyodbc/creation.py
index 86d7d3c7..038c0006 100644
--- a/django_pyodbc/creation.py
+++ b/django_pyodbc/creation.py
@@ -1,20 +1,63 @@
+# Copyright 2013-2017 Lionheart Software LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Copyright (c) 2008, django-pyodbc developers (see README.rst).
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of django-sql-server nor the names of its contributors
+# may be used to endorse or promote products derived from this software
+# without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
import base64
import random
+from django_pyodbc.compat import b, md5_constructor
+
try:
from django.db.backends.base.creation import BaseDatabaseCreation
except ImportError:
# import location prior to Django 1.8
from django.db.backends.creation import BaseDatabaseCreation
-
-from django_pyodbc.compat import b, md5_constructor
+
class DataTypesWrapper(dict):
def __getitem__(self, item):
if item in ('PositiveIntegerField', 'PositiveSmallIntegerField'):
# The check name must be unique for the database. Add a random
- # component so the regresion tests don't complain about duplicate names
+ # component so the regression tests don't complain about duplicate names
fldtype = {'PositiveIntegerField': 'int', 'PositiveSmallIntegerField': 'smallint'}[item]
rnd_hash = md5_constructor(b(str(random.random()))).hexdigest()
unique = base64.b64encode(b(rnd_hash), b('__'))[:6]
@@ -61,10 +104,10 @@ class DatabaseCreation(BaseDatabaseCreation):
'SlugField': 'nvarchar(%(max_length)s)',
'SmallIntegerField': 'smallint',
'TextField': 'nvarchar(max)',
- 'TimeField': 'time',
+ 'TimeField': 'time',
})
- def _create_test_db(self, verbosity, autoclobber):
+ def _create_test_db(self, verbosity, autoclobber, keepdb=False):
settings_dict = self.connection.settings_dict
if self.connection._DJANGO_VERSION >= 13:
@@ -110,7 +153,7 @@ def _create_test_db(self, verbosity, autoclobber):
if self.connection.ops.on_azure_sql_db:
self.connection.close()
settings_dict["NAME"] = 'master'
- return super(DatabaseCreation, self)._create_test_db(verbosity, autoclobber)
+ return super(DatabaseCreation, self)._create_test_db(verbosity, autoclobber, keepdb)
def _destroy_test_db(self, test_database_name, verbosity):
"Internal implementation - remove the test db tables."
diff --git a/django_pyodbc/introspection.py b/django_pyodbc/introspection.py
index a3280222..fc8ae35e 100644
--- a/django_pyodbc/introspection.py
+++ b/django_pyodbc/introspection.py
@@ -1,8 +1,55 @@
+# Copyright 2013-2017 Lionheart Software LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Copyright (c) 2008, django-pyodbc developers (see README.rst).
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of django-sql-server nor the names of its contributors
+# may be used to endorse or promote products derived from this software
+# without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
try:
- from django.db.backends.base.introspection import BaseDatabaseIntrospection
-except:
- # import location prior to Django 1.8
+ from django.db.backends.base.introspection import BaseDatabaseIntrospection, TableInfo
+except ImportError:
+ # Import location prior to Django 1.8
from django.db.backends import BaseDatabaseIntrospection
+
+ row_to_table_info = lambda row: row[0]
+else:
+ row_to_table_info = lambda row: TableInfo(row[0].lower(), row[1])
+
import pyodbc as Database
SQL_AUTOFIELD = -777555
@@ -41,14 +88,13 @@ def get_table_list(self, cursor):
Returns a list of table names in the current database.
"""
# TABLES: http://msdn2.microsoft.com/en-us/library/ms186224.aspx
- if cursor.db.limit_table_list:
- cursor.execute("SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE = 'BASE TABLE' AND TABLE_SCHEMA = 'dbo'")
+    # TODO: the queries below should probably select `TABLE_NAME, TABLE_TYPE`
+ if cursor.db_wrpr.limit_table_list:
+ cursor.execute("SELECT TABLE_NAME, 't' FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE = 'BASE TABLE' AND TABLE_SCHEMA = 'dbo'")
else:
- cursor.execute("SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE = 'BASE TABLE'")
- return [row[0] for row in cursor.fetchall()]
+ cursor.execute("SELECT TABLE_NAME, 't' FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE = 'BASE TABLE'")
- # Or pyodbc specific:
- #return [row[2] for row in cursor.tables(tableType='TABLE')]
+ return [row_to_table_info(row) for row in cursor.fetchall()]
def _is_auto_field(self, cursor, table_name, column_name):
"""
diff --git a/django_pyodbc/management/commands/ss_loaddata.py b/django_pyodbc/management/commands/ss_loaddata.py
index 6f6f95b3..23bd4f29 100644
--- a/django_pyodbc/management/commands/ss_loaddata.py
+++ b/django_pyodbc/management/commands/ss_loaddata.py
@@ -1,7 +1,52 @@
+# Copyright 2013-2017 Lionheart Software LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Copyright (c) 2008, django-pyodbc developers (see README.rst).
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of django-sql-server nor the names of its contributors
+# may be used to endorse or promote products derived from this software
+# without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
"""
ss_loaddata management command, we need to keep close track of changes in
django/core/management/commands/loaddata.py.
"""
+
+from __future__ import print_function
+
import sys
import os
import gzip
@@ -109,7 +154,7 @@ def read(self):
if formats:
if verbosity > 1:
- print "Loading '%s' fixtures..." % fixture_name
+ print("Loading '%s' fixtures..." % fixture_name)
else:
self.enable_forward_ref_checks(cursor)
sys.stderr.write(
@@ -126,7 +171,7 @@ def read(self):
for fixture_dir in fixture_dirs:
if verbosity > 1:
- print "Checking %s for fixtures..." % humanize(fixture_dir)
+ print("Checking %s for fixtures..." % humanize(fixture_dir))
label_found = False
for format in formats:
@@ -138,8 +183,8 @@ def read(self):
file_name = '.'.join([fixture_name, format])
if verbosity > 1:
- print "Trying %s for %s fixture '%s'..." % \
- (humanize(fixture_dir), file_name, fixture_name)
+ print("Trying %s for %s fixture '%s'..." % \
+ (humanize(fixture_dir), file_name, fixture_name))
full_path = os.path.join(fixture_dir, file_name)
open_method = compression_types[compression_format]
try:
@@ -147,8 +192,8 @@ def read(self):
if label_found:
fixture.close()
self.enable_forward_ref_checks(cursor)
- print self.style.ERROR("Multiple fixtures named '%s' in %s. Aborting." %
- (fixture_name, humanize(fixture_dir)))
+ print(self.style.ERROR("Multiple fixtures named '%s' in %s. Aborting." %
+ (fixture_name, humanize(fixture_dir))))
transaction.rollback()
transaction.leave_transaction_management()
return
@@ -156,8 +201,8 @@ def read(self):
fixture_count += 1
objects_in_fixture = 0
if verbosity > 0:
- print "Installing %s fixture '%s' from %s." % \
- (format, fixture_name, humanize(fixture_dir))
+ print("Installing %s fixture '%s' from %s." % \
+ (format, fixture_name, humanize(fixture_dir)))
try:
objects = serializers.deserialize(format, fixture)
for obj in objects:
@@ -197,10 +242,10 @@ def read(self):
transaction.leave_transaction_management()
return
- except Exception, e:
+ except Exception as e:
if verbosity > 1:
- print "No %s fixture '%s' in %s." % \
- (format, fixture_name, humanize(fixture_dir))
+ print("No %s fixture '%s' in %s." % \
+ (format, fixture_name, humanize(fixture_dir)))
self.enable_forward_ref_checks(cursor)
@@ -210,7 +255,7 @@ def read(self):
sequence_sql = connection.ops.sequence_reset_sql(self.style, models)
if sequence_sql:
if verbosity > 1:
- print "Resetting sequences"
+ print("Resetting sequences")
for line in sequence_sql:
cursor.execute(line)
@@ -220,10 +265,10 @@ def read(self):
if object_count == 0:
if verbosity > 1:
- print "No fixtures found."
+ print("No fixtures found.")
else:
if verbosity > 0:
- print "Installed %d object(s) from %d fixture(s)" % (object_count, fixture_count)
+ print("Installed %d object(s) from %d fixture(s)" % (object_count, fixture_count))
# Close the DB connection. This is required as a workaround for an
# edge case in MySQL: if the same connection is used to
diff --git a/django_pyodbc/metadata.py b/django_pyodbc/metadata.py
index dc068bff..ce8da404 100644
--- a/django_pyodbc/metadata.py
+++ b/django_pyodbc/metadata.py
@@ -1,5 +1,33 @@
-__version__ = "0.3.0"
+# Copyright (c) 2008, django-pyodbc developers (see README.rst).
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of django-sql-server nor the names of its contributors
+# may be used to endorse or promote products derived from this software
+# without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+__version__ = "2.0.0a2"
__maintainer__ = "Dan Loewenherz"
-__maintainer_email__ = "dan@dlo.me"
-__license__ = "See LICENSE"
+__maintainer_email__ = "dan@lionheartsw.com"
+__license__ = "BSD 3-Clause License"
diff --git a/django_pyodbc/operations.py b/django_pyodbc/operations.py
index 854572b2..17f943a3 100644
--- a/django_pyodbc/operations.py
+++ b/django_pyodbc/operations.py
@@ -1,3 +1,45 @@
+# Copyright 2013-2017 Lionheart Software LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Copyright (c) 2008, django-pyodbc developers (see README.rst).
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of django-sql-server nor the names of its contributors
+# may be used to endorse or promote products derived from this software
+# without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
import datetime
import decimal
import time
@@ -12,7 +54,8 @@
except ImportError:
# import location prior to Django 1.8
from django.db.backends import BaseDatabaseOperations
-
+from django.utils.dateparse import parse_date, parse_time, parse_datetime
+
from django_pyodbc.compat import smart_text, string_types, timezone
@@ -30,32 +73,53 @@ def __init__(self, connection):
self._ss_ver = None
self._ss_edition = None
self._is_db2 = None
-
+ self._is_openedge = None
+ self._left_sql_quote = None
+ self._right_sql_quote = None
+
@property
def is_db2(self):
if self._is_db2 is None:
- cur = self.connection.cursor()
- try:
- cur.execute("SELECT * FROM SYSIBM.COLUMNS FETCH FIRST 1 ROWS ONLY")
- self._is_db2 = True
- except Exception:
- self._is_db2 = False
-
+ options = self.connection.settings_dict.get('OPTIONS', {})
+ self._is_db2 = options.get('is_db2', False)
return self._is_db2
-
+
+ @property
+ def is_openedge(self):
+ if self._is_openedge is None:
+ options = self.connection.settings_dict.get('OPTIONS', {})
+ self._is_openedge = options.get('openedge', False)
+ return self._is_openedge
+
@property
def left_sql_quote(self):
- if self.is_db2:
- return '{'
- else:
- return '['
-
+ if self._left_sql_quote is None:
+ options = self.connection.settings_dict.get('OPTIONS', {})
+ q = options.get('left_sql_quote', None)
+ if q is not None:
+ self._left_sql_quote = q
+ elif self.is_db2:
+ self._left_sql_quote = '{'
+ elif self.is_openedge:
+ self._left_sql_quote = '"'
+ else:
+ self._left_sql_quote = '['
+ return self._left_sql_quote
+
@property
def right_sql_quote(self):
- if self.is_db2:
- return '}'
- else:
- return ']'
+ if self._right_sql_quote is None:
+ options = self.connection.settings_dict.get('OPTIONS', {})
+ q = options.get('right_sql_quote', None)
+ if q is not None:
+ self._right_sql_quote = q
+ elif self.is_db2:
+ self._right_sql_quote = '}'
+ elif self.is_openedge:
+ self._right_sql_quote = '"'
+ else:
+ self._right_sql_quote = ']'
+ return self._right_sql_quote
def _get_sql_server_ver(self):
"""
@@ -65,10 +129,12 @@ def _get_sql_server_ver(self):
return self._ss_ver
cur = self.connection.cursor()
ver_code = None
- if not self.is_db2:
+ if not self.is_db2 and not self.is_openedge:
cur.execute("SELECT CAST(SERVERPROPERTY('ProductVersion') as varchar)")
ver_code = cur.fetchone()[0]
ver_code = int(ver_code.split('.')[0])
+ else:
+ ver_code = 0
if ver_code >= 11:
self._ss_ver = 2012
elif ver_code == 10:
@@ -101,7 +167,7 @@ def date_extract_sql(self, lookup_type, field_name):
def date_trunc_sql(self, lookup_type, field_name):
return "DATEADD(%s, DATEDIFF(%s, 0, %s), 0)" % (lookup_type, lookup_type, field_name)
-
+
def _switch_tz_offset_sql(self, field_name, tzname):
"""
Returns the SQL that will convert field_name to UTC from tzname.
@@ -195,7 +261,10 @@ def last_insert_id(self, cursor, table_name, pk_name):
# @@IDENTITY is not limited to a specific scope.
table_name = self.quote_name(table_name)
- cursor.execute("SELECT CAST(IDENT_CURRENT(%s) as bigint)", [table_name])
+    if self.is_db2:
+ cursor.execute("SELECT CAST(IDENTITY_VAL_LOCAL() as bigint) from %s" % table_name)
+ else:
+        cursor.execute("SELECT CAST(IDENT_CURRENT(%s) as bigint)", [table_name])
return cursor.fetchone()[0]
def fetch_returned_insert_id(self, cursor):
@@ -362,7 +431,7 @@ def prep_for_iexact_query(self, x):
"""
return x
- def value_to_db_datetime(self, value):
+ def adapt_datetimefield_value(self, value):
"""
Transform a datetime value to an object compatible with what is expected
by the backend driver for datetime columns.
@@ -377,7 +446,7 @@ def value_to_db_datetime(self, value):
value = value.replace(microsecond=0)
return value
- def value_to_db_time(self, value):
+ def adapt_timefield_value(self, value):
"""
Transform a time value to an object compatible with what is expected
by the backend driver for time columns.
@@ -401,7 +470,7 @@ def year_lookup_bounds(self, value):
last = '%s-12-31 23:59:59'
return [first % value, last % value]
- def value_to_db_decimal(self, value, max_digits, decimal_places):
+ def adapt_decimalfield_value(self, value, max_digits, decimal_places):
"""
Transform a decimal.Decimal value to an object compatible with what is
expected by the backend driver for decimal (numeric) columns.
@@ -428,11 +497,20 @@ def convert_values(self, value, field):
if value is None:
return None
if field and field.get_internal_type() == 'DateTimeField':
+ if isinstance(value, string_types) and value:
+ value = parse_datetime(value)
return value
- elif field and field.get_internal_type() == 'DateField' and isinstance(value, datetime.datetime):
- value = value.date() # extract date
- elif field and field.get_internal_type() == 'TimeField' or (isinstance(value, datetime.datetime) and value.year == 1900 and value.month == value.day == 1):
- value = value.time() # extract time
+ elif field and field.get_internal_type() == 'DateField':
+ if isinstance(value, datetime.datetime):
+ value = value.date() # extract date
+ elif isinstance(value, string_types):
+ value = parse_date(value)
+ elif field and field.get_internal_type() == 'TimeField':
+ if (isinstance(value, datetime.datetime) and value.year == 1900 and value.month == value.day == 1):
+ value = value.time() # extract time
+ elif isinstance(value, string_types):
+ # If the value is a string, parse it using parse_time.
+ value = parse_time(value)
# Some cases (for example when select_related() is used) aren't
# caught by the DateField case above and date fields arrive from
# the DB as datetime instances.
@@ -451,16 +529,16 @@ def convert_values(self, value, field):
def return_insert_id(self):
"""
MSSQL implements the RETURNING SQL standard extension differently from
- the core database backends and this function is essentially a no-op.
+ the core database backends and this function is essentially a no-op.
The SQL is altered in the SQLInsertCompiler to add the necessary OUTPUT
clause.
"""
if self.connection._DJANGO_VERSION < 15:
- # This gets around inflexibility of SQLInsertCompiler's need to
+ # This gets around inflexibility of SQLInsertCompiler's need to
# append an SQL fragment at the end of the insert query, which also must
# expect the full quoted table and column name.
return ('/* %s */', '')
-
- # Django #19096 - As of Django 1.5, can return None, None to bypass the
+
+ # Django #19096 - As of Django 1.5, can return None, None to bypass the
# core's SQL mangling.
return (None, None)
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 00000000..c961ae3d
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,5 @@
+[egg_info]
+tag_svn_revision = false
+
+[bdist_wheel]
+universal=1
diff --git a/setup.py b/setup.py
old mode 100644
new mode 100755
index 5d8833e1..68b762e5
--- a/setup.py
+++ b/setup.py
@@ -1,8 +1,22 @@
#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright 2013-2017 Lionheart Software LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
import re
import os
-from django_pyodbc import metadata
try:
from setuptools import setup
@@ -21,15 +35,34 @@
long_description = link_regex.sub(r"", long_description)
long_description = link_alternate_regex.sub(r" :target: https://github.com/lionheart/django-pyodbc/blob/master/\1", long_description)
+metadata = {}
+metadata_file = "django_pyodbc/metadata.py"
+exec(compile(open(metadata_file).read(), metadata_file, 'exec'), metadata)
+
+# http://pypi.python.org/pypi?:action=list_classifiers
+classifiers = [
+ "Development Status :: 5 - Production/Stable",
+ "Environment :: Console",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: Apache Software License",
+ "License :: OSI Approved :: BSD License",
+ "Natural Language :: English",
+ "Operating System :: MacOS :: MacOS X",
+ "Operating System :: OS Independent",
+ "Operating System :: Unix",
+ "Programming Language :: Python :: 3.9",
+ "Topic :: Software Development :: Libraries",
+]
+
setup(
name='django-pyodbc',
long_description=long_description,
- version=metadata.__version__,
- license=metadata.__license__,
- maintainer=metadata.__maintainer__,
- maintainer_email=metadata.__maintainer_email__,
- description="Django 1.5 SQL Server backend using pyodbc.",
- url='https://github.com/aurorasoftware/django-pyodbc',
+ version=metadata['__version__'],
+ license=metadata['__license__'],
+ maintainer=metadata['__maintainer__'],
+ maintainer_email=metadata['__maintainer_email__'],
+ description="Django 1.5-1.10 SQL Server backend using pyodbc.",
+ url='https://github.com/lionheart/django-pyodbc',
package_data={'': ['LICENSE', 'README.rst']},
packages=[
'django_pyodbc',
@@ -37,6 +70,7 @@
'django_pyodbc.management.commands'
],
install_requires=[
- 'pyodbc>=3.0.6,<3.1',
+ 'pyodbc>=3.0.6,<4.1',
+ 'six>=1.15.0'
]
)
diff --git a/tests/django14/aggregation/models.py b/tests/django14/aggregation/models.py
deleted file mode 100644
index ccc12898..00000000
--- a/tests/django14/aggregation/models.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# coding: utf-8
-from django.db import models
-
-
-class Author(models.Model):
- name = models.CharField(max_length=100)
- age = models.IntegerField()
- friends = models.ManyToManyField('self', blank=True)
-
- def __unicode__(self):
- return self.name
-
-class Publisher(models.Model):
- name = models.CharField(max_length=255)
- num_awards = models.IntegerField()
-
- def __unicode__(self):
- return self.name
-
-class Book(models.Model):
- isbn = models.CharField(max_length=9)
- name = models.CharField(max_length=255)
- pages = models.IntegerField()
- rating = models.FloatField()
- price = models.DecimalField(decimal_places=2, max_digits=6)
- authors = models.ManyToManyField(Author)
- contact = models.ForeignKey(Author, related_name='book_contact_set')
- publisher = models.ForeignKey(Publisher)
- pubdate = models.DateField()
-
- def __unicode__(self):
- return self.name
-
-class Store(models.Model):
- name = models.CharField(max_length=255)
- books = models.ManyToManyField(Book)
- original_opening = models.DateTimeField()
- friday_night_closing = models.TimeField()
-
- def __unicode__(self):
- return self.name
-
diff --git a/tests/django14/aggregation/tests.py b/tests/django14/aggregation/tests.py
deleted file mode 100644
index a35dbb34..00000000
--- a/tests/django14/aggregation/tests.py
+++ /dev/null
@@ -1,567 +0,0 @@
-from __future__ import absolute_import
-
-import datetime
-from decimal import Decimal
-
-from django.db.models import Avg, Sum, Count, Max, Min
-from django.test import TestCase, Approximate
-
-from .models import Author, Publisher, Book, Store
-
-
-class BaseAggregateTestCase(TestCase):
- fixtures = ["aggregation.json"]
-
- def test_empty_aggregate(self):
- self.assertEqual(Author.objects.all().aggregate(), {})
-
- def test_single_aggregate(self):
- vals = Author.objects.aggregate(Avg("age"))
- self.assertEqual(vals, {"age__avg": Approximate(37.4, places=1)})
-
- def test_multiple_aggregates(self):
- vals = Author.objects.aggregate(Sum("age"), Avg("age"))
- self.assertEqual(vals, {"age__sum": 337, "age__avg": Approximate(37.4, places=1)})
-
- def test_filter_aggregate(self):
- vals = Author.objects.filter(age__gt=29).aggregate(Sum("age"))
- self.assertEqual(len(vals), 1)
- self.assertEqual(vals["age__sum"], 254)
-
- def test_related_aggregate(self):
- vals = Author.objects.aggregate(Avg("friends__age"))
- self.assertEqual(len(vals), 1)
- self.assertAlmostEqual(vals["friends__age__avg"], 34.07, places=2)
-
- vals = Book.objects.filter(rating__lt=4.5).aggregate(Avg("authors__age"))
- self.assertEqual(len(vals), 1)
- self.assertAlmostEqual(vals["authors__age__avg"], 38.2857, places=2)
-
- vals = Author.objects.all().filter(name__contains="a").aggregate(Avg("book__rating"))
- self.assertEqual(len(vals), 1)
- self.assertEqual(vals["book__rating__avg"], 4.0)
-
- vals = Book.objects.aggregate(Sum("publisher__num_awards"))
- self.assertEqual(len(vals), 1)
- self.assertEqual(vals["publisher__num_awards__sum"], 30)
-
- vals = Publisher.objects.aggregate(Sum("book__price"))
- self.assertEqual(len(vals), 1)
- self.assertEqual(vals["book__price__sum"], Decimal("270.27"))
-
- def test_aggregate_multi_join(self):
- vals = Store.objects.aggregate(Max("books__authors__age"))
- self.assertEqual(len(vals), 1)
- self.assertEqual(vals["books__authors__age__max"], 57)
-
- vals = Author.objects.aggregate(Min("book__publisher__num_awards"))
- self.assertEqual(len(vals), 1)
- self.assertEqual(vals["book__publisher__num_awards__min"], 1)
-
- def test_aggregate_alias(self):
- vals = Store.objects.filter(name="Amazon.com").aggregate(amazon_mean=Avg("books__rating"))
- self.assertEqual(len(vals), 1)
- self.assertAlmostEqual(vals["amazon_mean"], 4.08, places=2)
-
- def test_annotate_basic(self):
- self.assertQuerysetEqual(
- Book.objects.annotate().order_by('pk'), [
- "The Definitive Guide to Django: Web Development Done Right",
- "Sams Teach Yourself Django in 24 Hours",
- "Practical Django Projects",
- "Python Web Development with Django",
- "Artificial Intelligence: A Modern Approach",
- "Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp"
- ],
- lambda b: b.name
- )
-
- books = Book.objects.annotate(mean_age=Avg("authors__age"))
- b = books.get(pk=1)
- self.assertEqual(
- b.name,
- u'The Definitive Guide to Django: Web Development Done Right'
- )
- self.assertEqual(b.mean_age, 34.5)
-
- def test_annotate_m2m(self):
- books = Book.objects.filter(rating__lt=4.5).annotate(Avg("authors__age")).order_by("name")
- self.assertQuerysetEqual(
- books, [
- (u'Artificial Intelligence: A Modern Approach', 51.5),
- (u'Practical Django Projects', 29.0),
- (u'Python Web Development with Django', Approximate(30.3, places=1)),
- (u'Sams Teach Yourself Django in 24 Hours', 45.0)
- ],
- lambda b: (b.name, b.authors__age__avg),
- )
-
- books = Book.objects.annotate(num_authors=Count("authors")).order_by("name")
- self.assertQuerysetEqual(
- books, [
- (u'Artificial Intelligence: A Modern Approach', 2),
- (u'Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 1),
- (u'Practical Django Projects', 1),
- (u'Python Web Development with Django', 3),
- (u'Sams Teach Yourself Django in 24 Hours', 1),
- (u'The Definitive Guide to Django: Web Development Done Right', 2)
- ],
- lambda b: (b.name, b.num_authors)
- )
-
- def test_backwards_m2m_annotate(self):
- authors = Author.objects.filter(name__contains="a").annotate(Avg("book__rating")).order_by("name")
- self.assertQuerysetEqual(
- authors, [
- (u'Adrian Holovaty', 4.5),
- (u'Brad Dayley', 3.0),
- (u'Jacob Kaplan-Moss', 4.5),
- (u'James Bennett', 4.0),
- (u'Paul Bissex', 4.0),
- (u'Stuart Russell', 4.0)
- ],
- lambda a: (a.name, a.book__rating__avg)
- )
-
- authors = Author.objects.annotate(num_books=Count("book")).order_by("name")
- self.assertQuerysetEqual(
- authors, [
- (u'Adrian Holovaty', 1),
- (u'Brad Dayley', 1),
- (u'Jacob Kaplan-Moss', 1),
- (u'James Bennett', 1),
- (u'Jeffrey Forcier', 1),
- (u'Paul Bissex', 1),
- (u'Peter Norvig', 2),
- (u'Stuart Russell', 1),
- (u'Wesley J. Chun', 1)
- ],
- lambda a: (a.name, a.num_books)
- )
-
- def test_reverse_fkey_annotate(self):
- books = Book.objects.annotate(Sum("publisher__num_awards")).order_by("name")
- self.assertQuerysetEqual(
- books, [
- (u'Artificial Intelligence: A Modern Approach', 7),
- (u'Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 9),
- (u'Practical Django Projects', 3),
- (u'Python Web Development with Django', 7),
- (u'Sams Teach Yourself Django in 24 Hours', 1),
- (u'The Definitive Guide to Django: Web Development Done Right', 3)
- ],
- lambda b: (b.name, b.publisher__num_awards__sum)
- )
-
- publishers = Publisher.objects.annotate(Sum("book__price")).order_by("name")
- self.assertQuerysetEqual(
- publishers, [
- (u'Apress', Decimal("59.69")),
- (u"Jonno's House of Books", None),
- (u'Morgan Kaufmann', Decimal("75.00")),
- (u'Prentice Hall', Decimal("112.49")),
- (u'Sams', Decimal("23.09"))
- ],
- lambda p: (p.name, p.book__price__sum)
- )
-
- def test_annotate_values(self):
- books = list(Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values())
- self.assertEqual(
- books, [
- {
- "contact_id": 1,
- "id": 1,
- "isbn": "159059725",
- "mean_age": 34.5,
- "name": "The Definitive Guide to Django: Web Development Done Right",
- "pages": 447,
- "price": Approximate(Decimal("30")),
- "pubdate": datetime.date(2007, 12, 6),
- "publisher_id": 1,
- "rating": 4.5,
- }
- ]
- )
-
- books = Book.objects.filter(pk=1).annotate(mean_age=Avg('authors__age')).values('pk', 'isbn', 'mean_age')
- self.assertEqual(
- list(books), [
- {
- "pk": 1,
- "isbn": "159059725",
- "mean_age": 34.5,
- }
- ]
- )
-
- books = Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values("name")
- self.assertEqual(
- list(books), [
- {
- "name": "The Definitive Guide to Django: Web Development Done Right"
- }
- ]
- )
-
- books = Book.objects.filter(pk=1).values().annotate(mean_age=Avg('authors__age'))
- self.assertEqual(
- list(books), [
- {
- "contact_id": 1,
- "id": 1,
- "isbn": "159059725",
- "mean_age": 34.5,
- "name": "The Definitive Guide to Django: Web Development Done Right",
- "pages": 447,
- "price": Approximate(Decimal("30")),
- "pubdate": datetime.date(2007, 12, 6),
- "publisher_id": 1,
- "rating": 4.5,
- }
- ]
- )
-
- books = Book.objects.values("rating").annotate(n_authors=Count("authors__id"), mean_age=Avg("authors__age")).order_by("rating")
- self.assertEqual(
- list(books), [
- {
- "rating": 3.0,
- "n_authors": 1,
- "mean_age": 45.0,
- },
- {
- "rating": 4.0,
- "n_authors": 6,
- "mean_age": Approximate(37.16, places=1)
- },
- {
- "rating": 4.5,
- "n_authors": 2,
- "mean_age": 34.5,
- },
- {
- "rating": 5.0,
- "n_authors": 1,
- "mean_age": 57.0,
- }
- ]
- )
-
- authors = Author.objects.annotate(Avg("friends__age")).order_by("name")
- self.assertEqual(len(authors), 9)
- self.assertQuerysetEqual(
- authors, [
- (u'Adrian Holovaty', 32.0),
- (u'Brad Dayley', None),
- (u'Jacob Kaplan-Moss', 29.5),
- (u'James Bennett', 34.0),
- (u'Jeffrey Forcier', 27.0),
- (u'Paul Bissex', 31.0),
- (u'Peter Norvig', 46.0),
- (u'Stuart Russell', 57.0),
- (u'Wesley J. Chun', Approximate(33.66, places=1))
- ],
- lambda a: (a.name, a.friends__age__avg)
- )
-
- def test_count(self):
- vals = Book.objects.aggregate(Count("rating"))
- self.assertEqual(vals, {"rating__count": 6})
-
- vals = Book.objects.aggregate(Count("rating", distinct=True))
- self.assertEqual(vals, {"rating__count": 4})
-
- def test_fkey_aggregate(self):
- explicit = list(Author.objects.annotate(Count('book__id')))
- implicit = list(Author.objects.annotate(Count('book')))
- self.assertEqual(explicit, implicit)
-
- def test_annotate_ordering(self):
- books = Book.objects.values('rating').annotate(oldest=Max('authors__age')).order_by('oldest', 'rating')
- self.assertEqual(
- list(books), [
- {
- "rating": 4.5,
- "oldest": 35,
- },
- {
- "rating": 3.0,
- "oldest": 45
- },
- {
- "rating": 4.0,
- "oldest": 57,
- },
- {
- "rating": 5.0,
- "oldest": 57,
- }
- ]
- )
-
- books = Book.objects.values("rating").annotate(oldest=Max("authors__age")).order_by("-oldest", "-rating")
- self.assertEqual(
- list(books), [
- {
- "rating": 5.0,
- "oldest": 57,
- },
- {
- "rating": 4.0,
- "oldest": 57,
- },
- {
- "rating": 3.0,
- "oldest": 45,
- },
- {
- "rating": 4.5,
- "oldest": 35,
- }
- ]
- )
-
- def test_aggregate_annotation(self):
- vals = Book.objects.annotate(num_authors=Count("authors__id")).aggregate(Avg("num_authors"))
- self.assertEqual(vals, {"num_authors__avg": Approximate(1.66, places=1)})
-
- def test_filtering(self):
- p = Publisher.objects.create(name='Expensive Publisher', num_awards=0)
- Book.objects.create(
- name='ExpensiveBook1',
- pages=1,
- isbn='111',
- rating=3.5,
- price=Decimal("1000"),
- publisher=p,
- contact_id=1,
- pubdate=datetime.date(2008,12,1)
- )
- Book.objects.create(
- name='ExpensiveBook2',
- pages=1,
- isbn='222',
- rating=4.0,
- price=Decimal("1000"),
- publisher=p,
- contact_id=1,
- pubdate=datetime.date(2008,12,2)
- )
- Book.objects.create(
- name='ExpensiveBook3',
- pages=1,
- isbn='333',
- rating=4.5,
- price=Decimal("35"),
- publisher=p,
- contact_id=1,
- pubdate=datetime.date(2008,12,3)
- )
-
- publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
- self.assertQuerysetEqual(
- publishers, [
- "Apress",
- "Prentice Hall",
- "Expensive Publisher",
- ],
- lambda p: p.name,
- )
-
- publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).order_by("pk")
- self.assertQuerysetEqual(
- publishers, [
- "Apress",
- "Apress",
- "Sams",
- "Prentice Hall",
- "Expensive Publisher",
- ],
- lambda p: p.name
- )
-
- publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1, book__price__lt=Decimal("40.0")).order_by("pk")
- self.assertQuerysetEqual(
- publishers, [
- "Apress",
- "Prentice Hall",
- "Expensive Publisher",
- ],
- lambda p: p.name,
- )
-
- publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
- self.assertQuerysetEqual(
- publishers, [
- "Apress",
- ],
- lambda p: p.name
- )
-
- publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__range=[1, 3]).order_by("pk")
- self.assertQuerysetEqual(
- publishers, [
- "Apress",
- "Sams",
- "Prentice Hall",
- "Morgan Kaufmann",
- "Expensive Publisher",
- ],
- lambda p: p.name
- )
-
- publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__range=[1, 2]).order_by("pk")
- self.assertQuerysetEqual(
- publishers, [
- "Apress",
- "Sams",
- "Prentice Hall",
- "Morgan Kaufmann",
- ],
- lambda p: p.name
- )
-
- publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__in=[1, 3]).order_by("pk")
- self.assertQuerysetEqual(
- publishers, [
- "Sams",
- "Morgan Kaufmann",
- "Expensive Publisher",
- ],
- lambda p: p.name,
- )
-
- publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__isnull=True)
- self.assertEqual(len(publishers), 0)
-
- def test_annotation(self):
- vals = Author.objects.filter(pk=1).aggregate(Count("friends__id"))
- self.assertEqual(vals, {"friends__id__count": 2})
-
- books = Book.objects.annotate(num_authors=Count("authors__name")).filter(num_authors__ge=2).order_by("pk")
- self.assertQuerysetEqual(
- books, [
- "The Definitive Guide to Django: Web Development Done Right",
- "Artificial Intelligence: A Modern Approach",
- ],
- lambda b: b.name
- )
-
- authors = Author.objects.annotate(num_friends=Count("friends__id", distinct=True)).filter(num_friends=0).order_by("pk")
- self.assertQuerysetEqual(
- authors, [
- "Brad Dayley",
- ],
- lambda a: a.name
- )
-
- publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
- self.assertQuerysetEqual(
- publishers, [
- "Apress",
- "Prentice Hall",
- ],
- lambda p: p.name
- )
-
- publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).annotate(num_books=Count("book__id")).filter(num_books__gt=1)
- self.assertQuerysetEqual(
- publishers, [
- "Apress",
- ],
- lambda p: p.name
- )
-
- books = Book.objects.annotate(num_authors=Count("authors__id")).filter(authors__name__contains="Norvig", num_authors__gt=1)
- self.assertQuerysetEqual(
- books, [
- "Artificial Intelligence: A Modern Approach",
- ],
- lambda b: b.name
- )
-
- def test_more_aggregation(self):
- a = Author.objects.get(name__contains='Norvig')
- b = Book.objects.get(name__contains='Done Right')
- b.authors.add(a)
- b.save()
-
- vals = Book.objects.annotate(num_authors=Count("authors__id")).filter(authors__name__contains="Norvig", num_authors__gt=1).aggregate(Avg("rating"))
- self.assertEqual(vals, {"rating__avg": 4.25})
-
- def test_even_more_aggregate(self):
- publishers = Publisher.objects.annotate(earliest_book=Min("book__pubdate")).exclude(earliest_book=None).order_by("earliest_book").values()
- self.assertEqual(
- list(publishers), [
- {
- 'earliest_book': datetime.date(1991, 10, 15),
- 'num_awards': 9,
- 'id': 4,
- 'name': u'Morgan Kaufmann'
- },
- {
- 'earliest_book': datetime.date(1995, 1, 15),
- 'num_awards': 7,
- 'id': 3,
- 'name': u'Prentice Hall'
- },
- {
- 'earliest_book': datetime.date(2007, 12, 6),
- 'num_awards': 3,
- 'id': 1,
- 'name': u'Apress'
- },
- {
- 'earliest_book': datetime.date(2008, 3, 3),
- 'num_awards': 1,
- 'id': 2,
- 'name': u'Sams'
- }
- ]
- )
-
- vals = Store.objects.aggregate(Max("friday_night_closing"), Min("original_opening"))
- self.assertEqual(
- vals,
- {
- "friday_night_closing__max": datetime.time(23, 59, 59),
- "original_opening__min": datetime.datetime(1945, 4, 25, 16, 24, 14),
- }
- )
-
- def test_annotate_values_list(self):
- books = Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values_list("pk", "isbn", "mean_age")
- self.assertEqual(
- list(books), [
- (1, "159059725", 34.5),
- ]
- )
-
- books = Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values_list("isbn")
- self.assertEqual(
- list(books), [
- ('159059725',)
- ]
- )
-
- books = Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values_list("mean_age")
- self.assertEqual(
- list(books), [
- (34.5,)
- ]
- )
-
- books = Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values_list("mean_age", flat=True)
- self.assertEqual(list(books), [34.5])
-
- books = Book.objects.values_list("price").annotate(count=Count("price")).order_by("-count", "price")
- self.assertEqual(
- list(books), [
- (Decimal("29.69"), 2),
- (Decimal('23.09'), 1),
- (Decimal('30'), 1),
- (Decimal('75'), 1),
- (Decimal('82.8'), 1),
- ]
- )
diff --git a/tests/django14/aggregation_regress/models.py b/tests/django14/aggregation_regress/models.py
deleted file mode 100644
index ccef9a5f..00000000
--- a/tests/django14/aggregation_regress/models.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# coding: utf-8
-from django.db import models
-
-
-class Author(models.Model):
- name = models.CharField(max_length=100)
- age = models.IntegerField()
- friends = models.ManyToManyField('self', blank=True)
-
- def __unicode__(self):
- return self.name
-
-
-class Publisher(models.Model):
- name = models.CharField(max_length=255)
- num_awards = models.IntegerField()
-
- def __unicode__(self):
- return self.name
-
-
-class Book(models.Model):
- isbn = models.CharField(max_length=9)
- name = models.CharField(max_length=255)
- pages = models.IntegerField()
- rating = models.FloatField()
- price = models.DecimalField(decimal_places=2, max_digits=6)
- authors = models.ManyToManyField(Author)
- contact = models.ForeignKey(Author, related_name='book_contact_set')
- publisher = models.ForeignKey(Publisher)
- pubdate = models.DateField()
-
- class Meta:
- ordering = ('name',)
-
- def __unicode__(self):
- return self.name
-
-
-class Store(models.Model):
- name = models.CharField(max_length=255)
- books = models.ManyToManyField(Book)
- original_opening = models.DateTimeField()
- friday_night_closing = models.TimeField()
-
- def __unicode__(self):
- return self.name
-
-class Entries(models.Model):
- EntryID = models.AutoField(primary_key=True, db_column='Entry ID')
- Entry = models.CharField(unique=True, max_length=50)
- Exclude = models.BooleanField()
-
-
-class Clues(models.Model):
- ID = models.AutoField(primary_key=True)
- EntryID = models.ForeignKey(Entries, verbose_name='Entry', db_column = 'Entry ID')
- Clue = models.CharField(max_length=150)
-
-
-class HardbackBook(Book):
- weight = models.FloatField()
-
- def __unicode__(self):
- return "%s (hardback): %s" % (self.name, self.weight)
diff --git a/tests/django14/aggregation_regress/tests.py b/tests/django14/aggregation_regress/tests.py
deleted file mode 100644
index 36a54c0b..00000000
--- a/tests/django14/aggregation_regress/tests.py
+++ /dev/null
@@ -1,867 +0,0 @@
-from __future__ import absolute_import
-
-import datetime
-import pickle
-from decimal import Decimal
-from operator import attrgetter
-
-from django.core.exceptions import FieldError
-from django.db.models import Count, Max, Avg, Sum, StdDev, Variance, F, Q
-from django.test import TestCase, Approximate, skipUnlessDBFeature
-
-from .models import Author, Book, Publisher, Clues, Entries, HardbackBook
-
-
-class AggregationTests(TestCase):
- fixtures = ["aggregation_regress.json"]
-
- def assertObjectAttrs(self, obj, **kwargs):
- for attr, value in kwargs.iteritems():
- self.assertEqual(getattr(obj, attr), value)
-
- def test_aggregates_in_where_clause(self):
- """
- Regression test for #12822: DatabaseError: aggregates not allowed in
- WHERE clause
-
- Tests that the subselect works and returns results equivalent to a
- query with the IDs listed.
-
- Before the corresponding fix for this bug, this test passed in 1.1 and
- failed in 1.2-beta (trunk).
- """
- qs = Book.objects.values('contact').annotate(Max('id'))
- qs = qs.order_by('contact').values_list('id__max', flat=True)
- # don't do anything with the queryset (qs) before including it as a
- # subquery
- books = Book.objects.order_by('id')
- qs1 = books.filter(id__in=qs)
- qs2 = books.filter(id__in=list(qs))
- self.assertEqual(list(qs1), list(qs2))
-
- def test_aggregates_in_where_clause_pre_eval(self):
- """
- Regression test for #12822: DatabaseError: aggregates not allowed in
- WHERE clause
-
- Same as the above test, but evaluates the queryset for the subquery
- before it's used as a subquery.
-
- Before the corresponding fix for this bug, this test failed in both
- 1.1 and 1.2-beta (trunk).
- """
- qs = Book.objects.values('contact').annotate(Max('id'))
- qs = qs.order_by('contact').values_list('id__max', flat=True)
- # force the queryset (qs) for the subquery to be evaluated in its
- # current state
- list(qs)
- books = Book.objects.order_by('id')
- qs1 = books.filter(id__in=qs)
- qs2 = books.filter(id__in=list(qs))
- self.assertEqual(list(qs1), list(qs2))
-
- @skipUnlessDBFeature('supports_subqueries_in_group_by')
- def test_annotate_with_extra(self):
- """
- Regression test for #11916: Extra params + aggregation creates
- incorrect SQL.
- """
- #oracle doesn't support subqueries in group by clause
- shortest_book_sql = """
- SELECT name
- FROM aggregation_regress_book b
- WHERE b.publisher_id = aggregation_regress_publisher.id
- ORDER BY b.pages
- LIMIT 1
- """
- # tests that this query does not raise a DatabaseError due to the full
- # subselect being (erroneously) added to the GROUP BY parameters
- qs = Publisher.objects.extra(select={
- 'name_of_shortest_book': shortest_book_sql,
- }).annotate(total_books=Count('book'))
- # force execution of the query
- list(qs)
-
- def test_aggregate(self):
- # Ordering requests are ignored
- self.assertEqual(
- Author.objects.order_by("name").aggregate(Avg("age")),
- {"age__avg": Approximate(37.444, places=1)}
- )
-
- # Implicit ordering is also ignored
- self.assertEqual(
- Book.objects.aggregate(Sum("pages")),
- {"pages__sum": 3703},
- )
-
- # Baseline results
- self.assertEqual(
- Book.objects.aggregate(Sum('pages'), Avg('pages')),
- {'pages__sum': 3703, 'pages__avg': Approximate(617.166, places=2)}
- )
-
- # Empty values query doesn't affect grouping or results
- self.assertEqual(
- Book.objects.values().aggregate(Sum('pages'), Avg('pages')),
- {'pages__sum': 3703, 'pages__avg': Approximate(617.166, places=2)}
- )
-
- # Aggregate overrides extra selected column
- self.assertEqual(
- Book.objects.extra(select={'price_per_page' : 'price / pages'}).aggregate(Sum('pages')),
- {'pages__sum': 3703}
- )
-
- def test_annotation(self):
- # Annotations get combined with extra select clauses
- obj = Book.objects.annotate(mean_auth_age=Avg("authors__age")).extra(select={"manufacture_cost": "price * .5"}).get(pk=2)
- self.assertObjectAttrs(obj,
- contact_id=3,
- id=2,
- isbn=u'067232959',
- mean_auth_age=45.0,
- name='Sams Teach Yourself Django in 24 Hours',
- pages=528,
- price=Decimal("23.09"),
- pubdate=datetime.date(2008, 3, 3),
- publisher_id=2,
- rating=3.0
- )
- # Different DB backends return different types for the extra select computation
- self.assertTrue(obj.manufacture_cost == 11.545 or obj.manufacture_cost == Decimal('11.545'))
-
- # Order of the annotate/extra in the query doesn't matter
- obj = Book.objects.extra(select={'manufacture_cost' : 'price * .5'}).annotate(mean_auth_age=Avg('authors__age')).get(pk=2)
- self.assertObjectAttrs(obj,
- contact_id=3,
- id=2,
- isbn=u'067232959',
- mean_auth_age=45.0,
- name=u'Sams Teach Yourself Django in 24 Hours',
- pages=528,
- price=Decimal("23.09"),
- pubdate=datetime.date(2008, 3, 3),
- publisher_id=2,
- rating=3.0
- )
- # Different DB backends return different types for the extra select computation
- self.assertTrue(obj.manufacture_cost == 11.545 or obj.manufacture_cost == Decimal('11.545'))
-
- # Values queries can be combined with annotate and extra
- obj = Book.objects.annotate(mean_auth_age=Avg('authors__age')).extra(select={'manufacture_cost' : 'price * .5'}).values().get(pk=2)
- manufacture_cost = obj['manufacture_cost']
- self.assertTrue(manufacture_cost == 11.545 or manufacture_cost == Decimal('11.545'))
- del obj['manufacture_cost']
- self.assertEqual(obj, {
- "contact_id": 3,
- "id": 2,
- "isbn": u"067232959",
- "mean_auth_age": 45.0,
- "name": u"Sams Teach Yourself Django in 24 Hours",
- "pages": 528,
- "price": Decimal("23.09"),
- "pubdate": datetime.date(2008, 3, 3),
- "publisher_id": 2,
- "rating": 3.0,
- })
-
- # The order of the (empty) values, annotate and extra clauses doesn't
- # matter
- obj = Book.objects.values().annotate(mean_auth_age=Avg('authors__age')).extra(select={'manufacture_cost' : 'price * .5'}).get(pk=2)
- manufacture_cost = obj['manufacture_cost']
- self.assertTrue(manufacture_cost == 11.545 or manufacture_cost == Decimal('11.545'))
- del obj['manufacture_cost']
- self.assertEqual(obj, {
- 'contact_id': 3,
- 'id': 2,
- 'isbn': u'067232959',
- 'mean_auth_age': 45.0,
- 'name': u'Sams Teach Yourself Django in 24 Hours',
- 'pages': 528,
- 'price': Decimal("23.09"),
- 'pubdate': datetime.date(2008, 3, 3),
- 'publisher_id': 2,
- 'rating': 3.0
- })
-
- # If the annotation precedes the values clause, it won't be included
- # unless it is explicitly named
- obj = Book.objects.annotate(mean_auth_age=Avg('authors__age')).extra(select={'price_per_page' : 'price / pages'}).values('name').get(pk=1)
- self.assertEqual(obj, {
- "name": u'The Definitive Guide to Django: Web Development Done Right',
- })
-
- obj = Book.objects.annotate(mean_auth_age=Avg('authors__age')).extra(select={'price_per_page' : 'price / pages'}).values('name','mean_auth_age').get(pk=1)
- self.assertEqual(obj, {
- 'mean_auth_age': 34.5,
- 'name': u'The Definitive Guide to Django: Web Development Done Right',
- })
-
- # If an annotation isn't included in the values, it can still be used
- # in a filter
- qs = Book.objects.annotate(n_authors=Count('authors')).values('name').filter(n_authors__gt=2)
- self.assertQuerysetEqual(
- qs, [
- {"name": u'Python Web Development with Django'}
- ],
- lambda b: b,
- )
-
- # The annotations are added to values output if values() precedes
- # annotate()
- obj = Book.objects.values('name').annotate(mean_auth_age=Avg('authors__age')).extra(select={'price_per_page' : 'price / pages'}).get(pk=1)
- self.assertEqual(obj, {
- 'mean_auth_age': 34.5,
- 'name': u'The Definitive Guide to Django: Web Development Done Right',
- })
-
- # Check that all of the objects are getting counted (allow_nulls) and
- # that values respects the amount of objects
- self.assertEqual(
- len(Author.objects.annotate(Avg('friends__age')).values()),
- 9
- )
-
- # Check that consecutive calls to annotate accumulate in the query
- qs = Book.objects.values('price').annotate(oldest=Max('authors__age')).order_by('oldest', 'price').annotate(Max('publisher__num_awards'))
- self.assertQuerysetEqual(
- qs, [
- {'price': Decimal("30"), 'oldest': 35, 'publisher__num_awards__max': 3},
- {'price': Decimal("29.69"), 'oldest': 37, 'publisher__num_awards__max': 7},
- {'price': Decimal("23.09"), 'oldest': 45, 'publisher__num_awards__max': 1},
- {'price': Decimal("75"), 'oldest': 57, 'publisher__num_awards__max': 9},
- {'price': Decimal("82.8"), 'oldest': 57, 'publisher__num_awards__max': 7}
- ],
- lambda b: b,
- )
-
- def test_aggrate_annotation(self):
- # Aggregates can be composed over annotations.
- # The return type is derived from the composed aggregate
- vals = Book.objects.all().annotate(num_authors=Count('authors__id')).aggregate(Max('pages'), Max('price'), Sum('num_authors'), Avg('num_authors'))
- self.assertEqual(vals, {
- 'num_authors__sum': 10,
- 'num_authors__avg': Approximate(1.666, places=2),
- 'pages__max': 1132,
- 'price__max': Decimal("82.80")
- })
-
- def test_field_error(self):
- # Bad field requests in aggregates are caught and reported
- self.assertRaises(
- FieldError,
- lambda: Book.objects.all().aggregate(num_authors=Count('foo'))
- )
-
- self.assertRaises(
- FieldError,
- lambda: Book.objects.all().annotate(num_authors=Count('foo'))
- )
-
- self.assertRaises(
- FieldError,
- lambda: Book.objects.all().annotate(num_authors=Count('authors__id')).aggregate(Max('foo'))
- )
-
- def test_more(self):
- # Old-style count aggregations can be mixed with new-style
- self.assertEqual(
- Book.objects.annotate(num_authors=Count('authors')).count(),
- 6
- )
-
- # Non-ordinal, non-computed Aggregates over annotations correctly
- # inherit the annotation's internal type if the annotation is ordinal
- # or computed
- vals = Book.objects.annotate(num_authors=Count('authors')).aggregate(Max('num_authors'))
- self.assertEqual(
- vals,
- {'num_authors__max': 3}
- )
-
- vals = Publisher.objects.annotate(avg_price=Avg('book__price')).aggregate(Max('avg_price'))
- self.assertEqual(
- vals,
- {'avg_price__max': 75.0}
- )
-
- # Aliases are quoted to protected aliases that might be reserved names
- vals = Book.objects.aggregate(number=Max('pages'), select=Max('pages'))
- self.assertEqual(
- vals,
- {'number': 1132, 'select': 1132}
- )
-
- # Regression for #10064: select_related() plays nice with aggregates
- obj = Book.objects.select_related('publisher').annotate(num_authors=Count('authors')).values()[0]
- self.assertEqual(obj, {
- 'contact_id': 8,
- 'id': 5,
- 'isbn': u'013790395',
- 'name': u'Artificial Intelligence: A Modern Approach',
- 'num_authors': 2,
- 'pages': 1132,
- 'price': Decimal("82.8"),
- 'pubdate': datetime.date(1995, 1, 15),
- 'publisher_id': 3,
- 'rating': 4.0,
- })
-
- # Regression for #10010: exclude on an aggregate field is correctly
- # negated
- self.assertEqual(
- len(Book.objects.annotate(num_authors=Count('authors'))),
- 6
- )
- self.assertEqual(
- len(Book.objects.annotate(num_authors=Count('authors')).filter(num_authors__gt=2)),
- 1
- )
- self.assertEqual(
- len(Book.objects.annotate(num_authors=Count('authors')).exclude(num_authors__gt=2)),
- 5
- )
-
- self.assertEqual(
- len(Book.objects.annotate(num_authors=Count('authors')).filter(num_authors__lt=3).exclude(num_authors__lt=2)),
- 2
- )
- self.assertEqual(
- len(Book.objects.annotate(num_authors=Count('authors')).exclude(num_authors__lt=2).filter(num_authors__lt=3)),
- 2
- )
-
- def test_aggregate_fexpr(self):
- # Aggregates can be used with F() expressions
- # ... where the F() is pushed into the HAVING clause
- qs = Publisher.objects.annotate(num_books=Count('book')).filter(num_books__lt=F('num_awards')/2).order_by('name').values('name','num_books','num_awards')
- self.assertQuerysetEqual(
- qs, [
- {'num_books': 1, 'name': u'Morgan Kaufmann', 'num_awards': 9},
- {'num_books': 2, 'name': u'Prentice Hall', 'num_awards': 7}
- ],
- lambda p: p,
- )
-
- qs = Publisher.objects.annotate(num_books=Count('book')).exclude(num_books__lt=F('num_awards')/2).order_by('name').values('name','num_books','num_awards')
- self.assertQuerysetEqual(
- qs, [
- {'num_books': 2, 'name': u'Apress', 'num_awards': 3},
- {'num_books': 0, 'name': u"Jonno's House of Books", 'num_awards': 0},
- {'num_books': 1, 'name': u'Sams', 'num_awards': 1}
- ],
- lambda p: p,
- )
-
- # ... and where the F() references an aggregate
- qs = Publisher.objects.annotate(num_books=Count('book')).filter(num_awards__gt=2*F('num_books')).order_by('name').values('name','num_books','num_awards')
- self.assertQuerysetEqual(
- qs, [
- {'num_books': 1, 'name': u'Morgan Kaufmann', 'num_awards': 9},
- {'num_books': 2, 'name': u'Prentice Hall', 'num_awards': 7}
- ],
- lambda p: p,
- )
-
- qs = Publisher.objects.annotate(num_books=Count('book')).exclude(num_books__lt=F('num_awards')/2).order_by('name').values('name','num_books','num_awards')
- self.assertQuerysetEqual(
- qs, [
- {'num_books': 2, 'name': u'Apress', 'num_awards': 3},
- {'num_books': 0, 'name': u"Jonno's House of Books", 'num_awards': 0},
- {'num_books': 1, 'name': u'Sams', 'num_awards': 1}
- ],
- lambda p: p,
- )
-
- def test_db_col_table(self):
- # Tests on fields with non-default table and column names.
- qs = Clues.objects.values('EntryID__Entry').annotate(Appearances=Count('EntryID'), Distinct_Clues=Count('Clue', distinct=True))
- self.assertQuerysetEqual(qs, [])
-
- qs = Entries.objects.annotate(clue_count=Count('clues__ID'))
- self.assertQuerysetEqual(qs, [])
-
- def test_empty(self):
- # Regression for #10089: Check handling of empty result sets with
- # aggregates
- self.assertEqual(
- Book.objects.filter(id__in=[]).count(),
- 0
- )
-
- vals = Book.objects.filter(id__in=[]).aggregate(num_authors=Count('authors'), avg_authors=Avg('authors'), max_authors=Max('authors'), max_price=Max('price'), max_rating=Max('rating'))
- self.assertEqual(
- vals,
- {'max_authors': None, 'max_rating': None, 'num_authors': 0, 'avg_authors': None, 'max_price': None}
- )
-
- qs = Publisher.objects.filter(pk=5).annotate(num_authors=Count('book__authors'), avg_authors=Avg('book__authors'), max_authors=Max('book__authors'), max_price=Max('book__price'), max_rating=Max('book__rating')).values()
- self.assertQuerysetEqual(
- qs, [
- {'max_authors': None, 'name': u"Jonno's House of Books", 'num_awards': 0, 'max_price': None, 'num_authors': 0, 'max_rating': None, 'id': 5, 'avg_authors': None}
- ],
- lambda p: p
- )
-
- def test_more_more(self):
- # Regression for #10113 - Fields mentioned in order_by() must be
- # included in the GROUP BY. This only becomes a problem when the
- # order_by introduces a new join.
- self.assertQuerysetEqual(
- Book.objects.annotate(num_authors=Count('authors')).order_by('publisher__name', 'name'), [
- "Practical Django Projects",
- "The Definitive Guide to Django: Web Development Done Right",
- "Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp",
- "Artificial Intelligence: A Modern Approach",
- "Python Web Development with Django",
- "Sams Teach Yourself Django in 24 Hours",
- ],
- lambda b: b.name
- )
-
- # Regression for #10127 - Empty select_related() works with annotate
- qs = Book.objects.filter(rating__lt=4.5).select_related().annotate(Avg('authors__age'))
- self.assertQuerysetEqual(
- qs, [
- (u'Artificial Intelligence: A Modern Approach', 51.5, u'Prentice Hall', u'Peter Norvig'),
- (u'Practical Django Projects', 29.0, u'Apress', u'James Bennett'),
- (u'Python Web Development with Django', Approximate(30.333, places=2), u'Prentice Hall', u'Jeffrey Forcier'),
- (u'Sams Teach Yourself Django in 24 Hours', 45.0, u'Sams', u'Brad Dayley')
- ],
- lambda b: (b.name, b.authors__age__avg, b.publisher.name, b.contact.name)
- )
-
- # Regression for #10132 - If the values() clause only mentioned extra
- # (select=) columns, those columns are used for grouping
- qs = Book.objects.extra(select={'pub':'publisher_id'}).values('pub').annotate(Count('id')).order_by('pub')
- self.assertQuerysetEqual(
- qs, [
- {'pub': 1, 'id__count': 2},
- {'pub': 2, 'id__count': 1},
- {'pub': 3, 'id__count': 2},
- {'pub': 4, 'id__count': 1}
- ],
- lambda b: b
- )
-
- qs = Book.objects.extra(select={'pub':'publisher_id', 'foo':'pages'}).values('pub').annotate(Count('id')).order_by('pub')
- self.assertQuerysetEqual(
- qs, [
- {'pub': 1, 'id__count': 2},
- {'pub': 2, 'id__count': 1},
- {'pub': 3, 'id__count': 2},
- {'pub': 4, 'id__count': 1}
- ],
- lambda b: b
- )
-
- # Regression for #10182 - Queries with aggregate calls are correctly
- # realiased when used in a subquery
- ids = Book.objects.filter(pages__gt=100).annotate(n_authors=Count('authors')).filter(n_authors__gt=2).order_by('n_authors')
- self.assertQuerysetEqual(
- Book.objects.filter(id__in=ids), [
- "Python Web Development with Django",
- ],
- lambda b: b.name
- )
-
- # Regression for #15709 - Ensure each group_by field only exists once
- # per query
- qs = Book.objects.values('publisher').annotate(max_pages=Max('pages')).order_by()
- grouping, gb_params = qs.query.get_compiler(qs.db).get_grouping()
- self.assertEqual(len(grouping), 1)
-
- def test_duplicate_alias(self):
- # Regression for #11256 - duplicating a default alias raises ValueError.
- self.assertRaises(ValueError, Book.objects.all().annotate, Avg('authors__age'), authors__age__avg=Avg('authors__age'))
-
- def test_field_name_conflict(self):
- # Regression for #11256 - providing an aggregate name that conflicts with a field name on the model raises ValueError
- self.assertRaises(ValueError, Author.objects.annotate, age=Avg('friends__age'))
-
- def test_m2m_name_conflict(self):
- # Regression for #11256 - providing an aggregate name that conflicts with an m2m name on the model raises ValueError
- self.assertRaises(ValueError, Author.objects.annotate, friends=Count('friends'))
-
- def test_values_queryset_non_conflict(self):
- # Regression for #14707 -- If you're using a values query set, some potential conflicts are avoided.
-
- # age is a field on Author, so it shouldn't be allowed as an aggregate.
- # But age isn't included in the ValuesQuerySet, so it is.
- results = Author.objects.values('name').annotate(age=Count('book_contact_set')).order_by('name')
- self.assertEqual(len(results), 9)
- self.assertEqual(results[0]['name'], u'Adrian Holovaty')
- self.assertEqual(results[0]['age'], 1)
-
- # Same problem, but aggregating over m2m fields
- results = Author.objects.values('name').annotate(age=Avg('friends__age')).order_by('name')
- self.assertEqual(len(results), 9)
- self.assertEqual(results[0]['name'], u'Adrian Holovaty')
- self.assertEqual(results[0]['age'], 32.0)
-
- # Same problem, but colliding with an m2m field
- results = Author.objects.values('name').annotate(friends=Count('friends')).order_by('name')
- self.assertEqual(len(results), 9)
- self.assertEqual(results[0]['name'], u'Adrian Holovaty')
- self.assertEqual(results[0]['friends'], 2)
-
- def test_reverse_relation_name_conflict(self):
- # Regression for #11256 - providing an aggregate name that conflicts with a reverse-related name on the model raises ValueError
- self.assertRaises(ValueError, Author.objects.annotate, book_contact_set=Avg('friends__age'))
-
- def test_pickle(self):
- # Regression for #10197 -- Queries with aggregates can be pickled.
- # First check that pickling is possible at all. No crash = success
- qs = Book.objects.annotate(num_authors=Count('authors'))
- pickle.dumps(qs)
-
- # Then check that the round trip works.
- query = qs.query.get_compiler(qs.db).as_sql()[0]
- qs2 = pickle.loads(pickle.dumps(qs))
- self.assertEqual(
- qs2.query.get_compiler(qs2.db).as_sql()[0],
- query,
- )
-
- def test_more_more_more(self):
- # Regression for #10199 - Aggregate calls clone the original query so
- # the original query can still be used
- books = Book.objects.all()
- books.aggregate(Avg("authors__age"))
- self.assertQuerysetEqual(
- books.all(), [
- u'Artificial Intelligence: A Modern Approach',
- u'Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp',
- u'Practical Django Projects',
- u'Python Web Development with Django',
- u'Sams Teach Yourself Django in 24 Hours',
- u'The Definitive Guide to Django: Web Development Done Right'
- ],
- lambda b: b.name
- )
-
- # Regression for #10248 - Annotations work with DateQuerySets
- qs = Book.objects.annotate(num_authors=Count('authors')).filter(num_authors=2).dates('pubdate', 'day')
- self.assertQuerysetEqual(
- qs, [
- datetime.datetime(1995, 1, 15, 0, 0),
- datetime.datetime(2007, 12, 6, 0, 0)
- ],
- lambda b: b
- )
-
- # Regression for #10290 - extra selects with parameters can be used for
- # grouping.
- qs = Book.objects.annotate(mean_auth_age=Avg('authors__age')).extra(select={'sheets' : '(pages + %s) / %s'}, select_params=[1, 2]).order_by('sheets').values('sheets')
- self.assertQuerysetEqual(
- qs, [
- 150,
- 175,
- 224,
- 264,
- 473,
- 566
- ],
- lambda b: int(b["sheets"])
- )
-
- # Regression for 10425 - annotations don't get in the way of a count()
- # clause
- self.assertEqual(
- Book.objects.values('publisher').annotate(Count('publisher')).count(),
- 4
- )
- self.assertEqual(
- Book.objects.annotate(Count('publisher')).values('publisher').count(),
- 6
- )
-
- publishers = Publisher.objects.filter(id__in=[1, 2])
- self.assertEqual(
- sorted(p.name for p in publishers),
- [
- "Apress",
- "Sams"
- ]
- )
-
- publishers = publishers.annotate(n_books=Count("book"))
- self.assertEqual(
- publishers[0].n_books,
- 2
- )
-
- self.assertEqual(
- sorted(p.name for p in publishers),
- [
- "Apress",
- "Sams"
- ]
- )
-
- books = Book.objects.filter(publisher__in=publishers)
- self.assertQuerysetEqual(
- books, [
- "Practical Django Projects",
- "Sams Teach Yourself Django in 24 Hours",
- "The Definitive Guide to Django: Web Development Done Right",
- ],
- lambda b: b.name
- )
- self.assertEqual(
- sorted(p.name for p in publishers),
- [
- "Apress",
- "Sams"
- ]
- )
-
- # Regression for 10666 - inherited fields work with annotations and
- # aggregations
- self.assertEqual(
- HardbackBook.objects.aggregate(n_pages=Sum('book_ptr__pages')),
- {'n_pages': 2078}
- )
-
- self.assertEqual(
- HardbackBook.objects.aggregate(n_pages=Sum('pages')),
- {'n_pages': 2078},
- )
-
- qs = HardbackBook.objects.annotate(n_authors=Count('book_ptr__authors')).values('name', 'n_authors')
- self.assertQuerysetEqual(
- qs, [
- {'n_authors': 2, 'name': u'Artificial Intelligence: A Modern Approach'},
- {'n_authors': 1, 'name': u'Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp'}
- ],
- lambda h: h
- )
-
- qs = HardbackBook.objects.annotate(n_authors=Count('authors')).values('name', 'n_authors')
- self.assertQuerysetEqual(
- qs, [
- {'n_authors': 2, 'name': u'Artificial Intelligence: A Modern Approach'},
- {'n_authors': 1, 'name': u'Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp'}
- ],
- lambda h: h,
- )
-
- # Regression for #10766 - Shouldn't be able to reference an aggregate
- # fields in an aggregate() call.
- self.assertRaises(
- FieldError,
- lambda: Book.objects.annotate(mean_age=Avg('authors__age')).annotate(Avg('mean_age'))
- )
-
- def test_empty_filter_count(self):
- self.assertEqual(
- Author.objects.filter(id__in=[]).annotate(Count("friends")).count(),
- 0
- )
-
- def test_empty_filter_aggregate(self):
- self.assertEqual(
- Author.objects.filter(id__in=[]).annotate(Count("friends")).aggregate(Count("pk")),
- {"pk__count": None}
- )
-
- def test_none_call_before_aggregate(self):
- # Regression for #11789
- self.assertEqual(
- Author.objects.none().aggregate(Avg('age')),
- {'age__avg': None}
- )
-
- def test_annotate_and_join(self):
- self.assertEqual(
- Author.objects.annotate(c=Count("friends__name")).exclude(friends__name="Joe").count(),
- Author.objects.count()
- )
-
- def test_f_expression_annotation(self):
- # Books with less than 200 pages per author.
- qs = Book.objects.values("name").annotate(
- n_authors=Count("authors")
- ).filter(
- pages__lt=F("n_authors") * 200
- ).values_list("pk")
- self.assertQuerysetEqual(
- Book.objects.filter(pk__in=qs), [
- "Python Web Development with Django"
- ],
- attrgetter("name")
- )
-
- def test_values_annotate_values(self):
- qs = Book.objects.values("name").annotate(
- n_authors=Count("authors")
- ).values_list("pk", flat=True)
- self.assertEqual(list(qs), list(Book.objects.values_list("pk", flat=True)))
-
- def test_having_group_by(self):
- # Test that when a field occurs on the LHS of a HAVING clause that it
- # appears correctly in the GROUP BY clause
- qs = Book.objects.values_list("name").annotate(
- n_authors=Count("authors")
- ).filter(
- pages__gt=F("n_authors")
- ).values_list("name", flat=True)
- # Results should be the same, all Books have more pages than authors
- self.assertEqual(
- list(qs), list(Book.objects.values_list("name", flat=True))
- )
-
- def test_annotation_disjunction(self):
- qs = Book.objects.annotate(n_authors=Count("authors")).filter(
- Q(n_authors=2) | Q(name="Python Web Development with Django")
- )
- self.assertQuerysetEqual(
- qs, [
- "Artificial Intelligence: A Modern Approach",
- "Python Web Development with Django",
- "The Definitive Guide to Django: Web Development Done Right",
- ],
- attrgetter("name")
- )
-
- qs = Book.objects.annotate(n_authors=Count("authors")).filter(
- Q(name="The Definitive Guide to Django: Web Development Done Right") | (Q(name="Artificial Intelligence: A Modern Approach") & Q(n_authors=3))
- )
- self.assertQuerysetEqual(
- qs, [
- "The Definitive Guide to Django: Web Development Done Right",
- ],
- attrgetter("name")
- )
-
- qs = Publisher.objects.annotate(
- rating_sum=Sum("book__rating"),
- book_count=Count("book")
- ).filter(
- Q(rating_sum__gt=5.5) | Q(rating_sum__isnull=True)
- ).order_by('pk')
- self.assertQuerysetEqual(
- qs, [
- "Apress",
- "Prentice Hall",
- "Jonno's House of Books",
- ],
- attrgetter("name")
- )
-
- qs = Publisher.objects.annotate(
- rating_sum=Sum("book__rating"),
- book_count=Count("book")
- ).filter(
- Q(pk__lt=F("book_count")) | Q(rating_sum=None)
- ).order_by("pk")
- self.assertQuerysetEqual(
- qs, [
- "Apress",
- "Jonno's House of Books",
- ],
- attrgetter("name")
- )
-
- def test_quoting_aggregate_order_by(self):
- qs = Book.objects.filter(
- name="Python Web Development with Django"
- ).annotate(
- authorCount=Count("authors")
- ).order_by("authorCount")
- self.assertQuerysetEqual(
- qs, [
- ("Python Web Development with Django", 3),
- ],
- lambda b: (b.name, b.authorCount)
- )
-
- @skipUnlessDBFeature('supports_stddev')
- def test_stddev(self):
- self.assertEqual(
- Book.objects.aggregate(StdDev('pages')),
- {'pages__stddev': Approximate(311.46, 1)}
- )
-
- self.assertEqual(
- Book.objects.aggregate(StdDev('rating')),
- {'rating__stddev': Approximate(0.60, 1)}
- )
-
- self.assertEqual(
- Book.objects.aggregate(StdDev('price')),
- {'price__stddev': Approximate(24.16, 2)}
- )
-
- self.assertEqual(
- Book.objects.aggregate(StdDev('pages', sample=True)),
- {'pages__stddev': Approximate(341.19, 2)}
- )
-
- self.assertEqual(
- Book.objects.aggregate(StdDev('rating', sample=True)),
- {'rating__stddev': Approximate(0.66, 2)}
- )
-
- self.assertEqual(
- Book.objects.aggregate(StdDev('price', sample=True)),
- {'price__stddev': Approximate(26.46, 1)}
- )
-
- self.assertEqual(
- Book.objects.aggregate(Variance('pages')),
- {'pages__variance': Approximate(97010.80, 1)}
- )
-
- self.assertEqual(
- Book.objects.aggregate(Variance('rating')),
- {'rating__variance': Approximate(0.36, 1)}
- )
-
- self.assertEqual(
- Book.objects.aggregate(Variance('price')),
- {'price__variance': Approximate(583.77, 1)}
- )
-
- self.assertEqual(
- Book.objects.aggregate(Variance('pages', sample=True)),
- {'pages__variance': Approximate(116412.96, 1)}
- )
-
- self.assertEqual(
- Book.objects.aggregate(Variance('rating', sample=True)),
- {'rating__variance': Approximate(0.44, 2)}
- )
-
- self.assertEqual(
- Book.objects.aggregate(Variance('price', sample=True)),
- {'price__variance': Approximate(700.53, 2)}
- )
-
- def test_filtering_by_annotation_name(self):
- # Regression test for #14476
-
- # The name of the explicitly provided annotation name in this case
- # poses no problem
- qs = Author.objects.annotate(book_cnt=Count('book')).filter(book_cnt=2)
- self.assertQuerysetEqual(
- qs,
- ['Peter Norvig'],
- lambda b: b.name
- )
- # Neither in this case
- qs = Author.objects.annotate(book_count=Count('book')).filter(book_count=2)
- self.assertQuerysetEqual(
- qs,
- ['Peter Norvig'],
- lambda b: b.name
- )
- # This case used to fail because the ORM couldn't resolve the
- # automatically generated annotation name `book__count`
- qs = Author.objects.annotate(Count('book')).filter(book__count=2)
- self.assertQuerysetEqual(
- qs,
- ['Peter Norvig'],
- lambda b: b.name
- )
diff --git a/tests/django14/basic/models.py b/tests/django14/basic/models.py
deleted file mode 100644
index 06aa9cf3..00000000
--- a/tests/django14/basic/models.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# coding: utf-8
-"""
-1. Bare-bones model
-
-This is a basic model with only two non-primary-key fields.
-"""
-from django.db import models
-
-
-class Article(models.Model):
- headline = models.CharField(max_length=100, default='Default headline')
- pub_date = models.DateTimeField()
-
- class Meta:
- ordering = ('pub_date','headline')
-
- def __unicode__(self):
- return self.headline
diff --git a/tests/django14/basic/tests.py b/tests/django14/basic/tests.py
deleted file mode 100644
index f9141dc6..00000000
--- a/tests/django14/basic/tests.py
+++ /dev/null
@@ -1,580 +0,0 @@
-from __future__ import absolute_import
-
-from datetime import datetime
-
-from django.core.exceptions import ObjectDoesNotExist
-from django.db.models.fields import FieldDoesNotExist
-from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature
-from django.utils.translation import ugettext_lazy
-
-from .models import Article
-
-
-class ModelTest(TestCase):
-
- def test_lookup(self):
- # No articles are in the system yet.
- self.assertQuerysetEqual(Article.objects.all(), [])
-
- # Create an Article.
- a = Article(
- id=None,
- headline='Area man programs in Python',
- pub_date=datetime(2005, 7, 28),
- )
-
- # Save it into the database. You have to call save() explicitly.
- a.save()
-
- # Now it has an ID.
- self.assertTrue(a.id != None)
-
- # Models have a pk property that is an alias for the primary key
- # attribute (by default, the 'id' attribute).
- self.assertEqual(a.pk, a.id)
-
- # Access database columns via Python attributes.
- self.assertEqual(a.headline, 'Area man programs in Python')
- self.assertEqual(a.pub_date, datetime(2005, 7, 28, 0, 0))
-
- # Change values by changing the attributes, then calling save().
- a.headline = 'Area woman programs in Python'
- a.save()
-
- # Article.objects.all() returns all the articles in the database.
- self.assertQuerysetEqual(Article.objects.all(),
- [''])
-
- # Django provides a rich database lookup API.
- self.assertEqual(Article.objects.get(id__exact=a.id), a)
- self.assertEqual(Article.objects.get(headline__startswith='Area woman'), a)
- self.assertEqual(Article.objects.get(pub_date__year=2005), a)
- self.assertEqual(Article.objects.get(pub_date__year=2005, pub_date__month=7), a)
- self.assertEqual(Article.objects.get(pub_date__year=2005, pub_date__month=7, pub_date__day=28), a)
- self.assertEqual(Article.objects.get(pub_date__week_day=5), a)
-
- # The "__exact" lookup type can be omitted, as a shortcut.
- self.assertEqual(Article.objects.get(id=a.id), a)
- self.assertEqual(Article.objects.get(headline='Area woman programs in Python'), a)
-
- self.assertQuerysetEqual(
- Article.objects.filter(pub_date__year=2005),
- [''],
- )
- self.assertQuerysetEqual(
- Article.objects.filter(pub_date__year=2004),
- [],
- )
- self.assertQuerysetEqual(
- Article.objects.filter(pub_date__year=2005, pub_date__month=7),
- [''],
- )
-
- self.assertQuerysetEqual(
- Article.objects.filter(pub_date__week_day=5),
- [''],
- )
- self.assertQuerysetEqual(
- Article.objects.filter(pub_date__week_day=6),
- [],
- )
-
- # Django raises an Article.DoesNotExist exception for get() if the
- # parameters don't match any object.
- self.assertRaisesRegexp(
- ObjectDoesNotExist,
- "Article matching query does not exist.",
- Article.objects.get,
- id__exact=2000,
- )
-
- self.assertRaisesRegexp(
- ObjectDoesNotExist,
- "Article matching query does not exist.",
- Article.objects.get,
- pub_date__year=2005,
- pub_date__month=8,
- )
-
- self.assertRaisesRegexp(
- ObjectDoesNotExist,
- "Article matching query does not exist.",
- Article.objects.get,
- pub_date__week_day=6,
- )
-
- # Lookup by a primary key is the most common case, so Django
- # provides a shortcut for primary-key exact lookups.
- # The following is identical to articles.get(id=a.id).
- self.assertEqual(Article.objects.get(pk=a.id), a)
-
- # pk can be used as a shortcut for the primary key name in any query.
- self.assertQuerysetEqual(Article.objects.filter(pk__in=[a.id]),
- [""])
-
- # Model instances of the same type and same ID are considered equal.
- a = Article.objects.get(pk=a.id)
- b = Article.objects.get(pk=a.id)
- self.assertEqual(a, b)
-
- def test_object_creation(self):
- # Create an Article.
- a = Article(
- id=None,
- headline='Area man programs in Python',
- pub_date=datetime(2005, 7, 28),
- )
-
- # Save it into the database. You have to call save() explicitly.
- a.save()
-
- # You can initialize a model instance using positional arguments,
- # which should match the field order as defined in the model.
- a2 = Article(None, 'Second article', datetime(2005, 7, 29))
- a2.save()
-
- self.assertNotEqual(a2.id, a.id)
- self.assertEqual(a2.headline, 'Second article')
- self.assertEqual(a2.pub_date, datetime(2005, 7, 29, 0, 0))
-
- # ...or, you can use keyword arguments.
- a3 = Article(
- id=None,
- headline='Third article',
- pub_date=datetime(2005, 7, 30),
- )
- a3.save()
-
- self.assertNotEqual(a3.id, a.id)
- self.assertNotEqual(a3.id, a2.id)
- self.assertEqual(a3.headline, 'Third article')
- self.assertEqual(a3.pub_date, datetime(2005, 7, 30, 0, 0))
-
- # You can also mix and match position and keyword arguments, but
- # be sure not to duplicate field information.
- a4 = Article(None, 'Fourth article', pub_date=datetime(2005, 7, 31))
- a4.save()
- self.assertEqual(a4.headline, 'Fourth article')
-
- # Don't use invalid keyword arguments.
- self.assertRaisesRegexp(
- TypeError,
- "'foo' is an invalid keyword argument for this function",
- Article,
- id=None,
- headline='Invalid',
- pub_date=datetime(2005, 7, 31),
- foo='bar',
- )
-
- # You can leave off the value for an AutoField when creating an
- # object, because it'll get filled in automatically when you save().
- a5 = Article(headline='Article 6', pub_date=datetime(2005, 7, 31))
- a5.save()
- self.assertEqual(a5.headline, 'Article 6')
-
- # If you leave off a field with "default" set, Django will use
- # the default.
- a6 = Article(pub_date=datetime(2005, 7, 31))
- a6.save()
- self.assertEqual(a6.headline, u'Default headline')
-
- # For DateTimeFields, Django saves as much precision (in seconds)
- # as you give it.
- a7 = Article(
- headline='Article 7',
- pub_date=datetime(2005, 7, 31, 12, 30),
- )
- a7.save()
- self.assertEqual(Article.objects.get(id__exact=a7.id).pub_date,
- datetime(2005, 7, 31, 12, 30))
-
- a8 = Article(
- headline='Article 8',
- pub_date=datetime(2005, 7, 31, 12, 30, 45),
- )
- a8.save()
- self.assertEqual(Article.objects.get(id__exact=a8.id).pub_date,
- datetime(2005, 7, 31, 12, 30, 45))
-
- # Saving an object again doesn't create a new object -- it just saves
- # the old one.
- current_id = a8.id
- a8.save()
- self.assertEqual(a8.id, current_id)
- a8.headline = 'Updated article 8'
- a8.save()
- self.assertEqual(a8.id, current_id)
-
- # Check that != and == operators behave as expecte on instances
- self.assertTrue(a7 != a8)
- self.assertFalse(a7 == a8)
- self.assertEqual(a8, Article.objects.get(id__exact=a8.id))
-
- self.assertTrue(Article.objects.get(id__exact=a8.id) != Article.objects.get(id__exact=a7.id))
- self.assertFalse(Article.objects.get(id__exact=a8.id) == Article.objects.get(id__exact=a7.id))
-
- # You can use 'in' to test for membership...
- self.assertTrue(a8 in Article.objects.all())
-
- # ... but there will often be more efficient ways if that is all you need:
- self.assertTrue(Article.objects.filter(id=a8.id).exists())
-
- # dates() returns a list of available dates of the given scope for
- # the given field.
- self.assertQuerysetEqual(
- Article.objects.dates('pub_date', 'year'),
- ["datetime.datetime(2005, 1, 1, 0, 0)"])
- self.assertQuerysetEqual(
- Article.objects.dates('pub_date', 'month'),
- ["datetime.datetime(2005, 7, 1, 0, 0)"])
- self.assertQuerysetEqual(
- Article.objects.dates('pub_date', 'day'),
- ["datetime.datetime(2005, 7, 28, 0, 0)",
- "datetime.datetime(2005, 7, 29, 0, 0)",
- "datetime.datetime(2005, 7, 30, 0, 0)",
- "datetime.datetime(2005, 7, 31, 0, 0)"])
- self.assertQuerysetEqual(
- Article.objects.dates('pub_date', 'day', order='ASC'),
- ["datetime.datetime(2005, 7, 28, 0, 0)",
- "datetime.datetime(2005, 7, 29, 0, 0)",
- "datetime.datetime(2005, 7, 30, 0, 0)",
- "datetime.datetime(2005, 7, 31, 0, 0)"])
- self.assertQuerysetEqual(
- Article.objects.dates('pub_date', 'day', order='DESC'),
- ["datetime.datetime(2005, 7, 31, 0, 0)",
- "datetime.datetime(2005, 7, 30, 0, 0)",
- "datetime.datetime(2005, 7, 29, 0, 0)",
- "datetime.datetime(2005, 7, 28, 0, 0)"])
-
- # dates() requires valid arguments.
- self.assertRaisesRegexp(
- TypeError,
- "dates\(\) takes at least 3 arguments \(1 given\)",
- Article.objects.dates,
- )
-
- self.assertRaisesRegexp(
- FieldDoesNotExist,
- "Article has no field named 'invalid_field'",
- Article.objects.dates,
- "invalid_field",
- "year",
- )
-
- self.assertRaisesRegexp(
- AssertionError,
- "'kind' must be one of 'year', 'month' or 'day'.",
- Article.objects.dates,
- "pub_date",
- "bad_kind",
- )
-
- self.assertRaisesRegexp(
- AssertionError,
- "'order' must be either 'ASC' or 'DESC'.",
- Article.objects.dates,
- "pub_date",
- "year",
- order="bad order",
- )
-
- # Use iterator() with dates() to return a generator that lazily
- # requests each result one at a time, to save memory.
- dates = []
- for article in Article.objects.dates('pub_date', 'day', order='DESC').iterator():
- dates.append(article)
- self.assertEqual(dates, [
- datetime(2005, 7, 31, 0, 0),
- datetime(2005, 7, 30, 0, 0),
- datetime(2005, 7, 29, 0, 0),
- datetime(2005, 7, 28, 0, 0)])
-
- # You can combine queries with & and |.
- s1 = Article.objects.filter(id__exact=a.id)
- s2 = Article.objects.filter(id__exact=a2.id)
- self.assertQuerysetEqual(s1 | s2,
- ["",
- ""])
- self.assertQuerysetEqual(s1 & s2, [])
-
- # You can get the number of objects like this:
- self.assertEqual(len(Article.objects.filter(id__exact=a.id)), 1)
-
- # You can get items using index and slice notation.
- self.assertEqual(Article.objects.all()[0], a)
- self.assertQuerysetEqual(Article.objects.all()[1:3],
- ["", ""])
-
- s3 = Article.objects.filter(id__exact=a3.id)
- self.assertQuerysetEqual((s1 | s2 | s3)[::2],
- ["",
- ""])
-
- # Slicing works with longs.
- self.assertEqual(Article.objects.all()[0L], a)
- self.assertQuerysetEqual(Article.objects.all()[1L:3L],
- ["", ""])
- self.assertQuerysetEqual((s1 | s2 | s3)[::2L],
- ["",
- ""])
-
- # And can be mixed with ints.
- self.assertQuerysetEqual(Article.objects.all()[1:3L],
- ["", ""])
-
- # Slices (without step) are lazy:
- self.assertQuerysetEqual(Article.objects.all()[0:5].filter(),
- ["",
- "",
- "",
- "",
- ""])
-
- # Slicing again works:
- self.assertQuerysetEqual(Article.objects.all()[0:5][0:2],
- ["",
- ""])
- self.assertQuerysetEqual(Article.objects.all()[0:5][:2],
- ["",
- ""])
- self.assertQuerysetEqual(Article.objects.all()[0:5][4:],
- [""])
- self.assertQuerysetEqual(Article.objects.all()[0:5][5:], [])
-
- # Some more tests!
- self.assertQuerysetEqual(Article.objects.all()[2:][0:2],
- ["", ""])
- self.assertQuerysetEqual(Article.objects.all()[2:][:2],
- ["", ""])
- self.assertQuerysetEqual(Article.objects.all()[2:][2:3],
- [""])
-
- # Using an offset without a limit is also possible.
- self.assertQuerysetEqual(Article.objects.all()[5:],
- ["",
- "",
- ""])
-
- # Also, once you have sliced you can't filter, re-order or combine
- self.assertRaisesRegexp(
- AssertionError,
- "Cannot filter a query once a slice has been taken.",
- Article.objects.all()[0:5].filter,
- id=a.id,
- )
-
- self.assertRaisesRegexp(
- AssertionError,
- "Cannot reorder a query once a slice has been taken.",
- Article.objects.all()[0:5].order_by,
- 'id',
- )
-
- try:
- Article.objects.all()[0:1] & Article.objects.all()[4:5]
- self.fail('Should raise an AssertionError')
- except AssertionError, e:
- self.assertEqual(str(e), "Cannot combine queries once a slice has been taken.")
- except Exception, e:
- self.fail('Should raise an AssertionError, not %s' % e)
-
- # Negative slices are not supported, due to database constraints.
- # (hint: inverting your ordering might do what you need).
- try:
- Article.objects.all()[-1]
- self.fail('Should raise an AssertionError')
- except AssertionError, e:
- self.assertEqual(str(e), "Negative indexing is not supported.")
- except Exception, e:
- self.fail('Should raise an AssertionError, not %s' % e)
-
- error = None
- try:
- Article.objects.all()[0:-5]
- except Exception, e:
- error = e
- self.assertTrue(isinstance(error, AssertionError))
- self.assertEqual(str(error), "Negative indexing is not supported.")
-
- # An Article instance doesn't have access to the "objects" attribute.
- # That's only available on the class.
- self.assertRaisesRegexp(
- AttributeError,
- "Manager isn't accessible via Article instances",
- getattr,
- a7,
- "objects",
- )
-
- # Bulk delete test: How many objects before and after the delete?
- self.assertQuerysetEqual(Article.objects.all(),
- ["",
- "",
- "",
- "",
- "",
- "",
- "",
- ""])
- Article.objects.filter(id__lte=a4.id).delete()
- self.assertQuerysetEqual(Article.objects.all(),
- ["",
- "",
- "",
- ""])
-
- @skipUnlessDBFeature('supports_microsecond_precision')
- def test_microsecond_precision(self):
- # In PostgreSQL, microsecond-level precision is available.
- a9 = Article(
- headline='Article 9',
- pub_date=datetime(2005, 7, 31, 12, 30, 45, 180),
- )
- a9.save()
- self.assertEqual(Article.objects.get(pk=a9.pk).pub_date,
- datetime(2005, 7, 31, 12, 30, 45, 180))
-
- @skipIfDBFeature('supports_microsecond_precision')
- def test_microsecond_precision_not_supported(self):
- # In MySQL, microsecond-level precision isn't available. You'll lose
- # microsecond-level precision once the data is saved.
- a9 = Article(
- headline='Article 9',
- pub_date=datetime(2005, 7, 31, 12, 30, 45, 180),
- )
- a9.save()
- self.assertEqual(Article.objects.get(id__exact=a9.id).pub_date,
- datetime(2005, 7, 31, 12, 30, 45))
-
- def test_manually_specify_primary_key(self):
- # You can manually specify the primary key when creating a new object.
- a101 = Article(
- id=101,
- headline='Article 101',
- pub_date=datetime(2005, 7, 31, 12, 30, 45),
- )
- a101.save()
- a101 = Article.objects.get(pk=101)
- self.assertEqual(a101.headline, u'Article 101')
-
- def test_create_method(self):
- # You can create saved objects in a single step
- a10 = Article.objects.create(
- headline="Article 10",
- pub_date=datetime(2005, 7, 31, 12, 30, 45),
- )
- self.assertEqual(Article.objects.get(headline="Article 10"), a10)
-
- def test_year_lookup_edge_case(self):
- # Edge-case test: A year lookup should retrieve all objects in
- # the given year, including Jan. 1 and Dec. 31.
- a11 = Article.objects.create(
- headline='Article 11',
- pub_date=datetime(2008, 1, 1),
- )
- a12 = Article.objects.create(
- headline='Article 12',
- pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999),
- )
- self.assertQuerysetEqual(Article.objects.filter(pub_date__year=2008),
- ["", ""])
-
- def test_unicode_data(self):
- # Unicode data works, too.
- a = Article(
- headline=u'\u6797\u539f \u3081\u3050\u307f',
- pub_date=datetime(2005, 7, 28),
- )
- a.save()
- self.assertEqual(Article.objects.get(pk=a.id).headline,
- u'\u6797\u539f \u3081\u3050\u307f')
-
- def test_hash_function(self):
- # Model instances have a hash function, so they can be used in sets
- # or as dictionary keys. Two models compare as equal if their primary
- # keys are equal.
- a10 = Article.objects.create(
- headline="Article 10",
- pub_date=datetime(2005, 7, 31, 12, 30, 45),
- )
- a11 = Article.objects.create(
- headline='Article 11',
- pub_date=datetime(2008, 1, 1),
- )
- a12 = Article.objects.create(
- headline='Article 12',
- pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999),
- )
-
- s = set([a10, a11, a12])
- self.assertTrue(Article.objects.get(headline='Article 11') in s)
-
- def test_extra_method_select_argument_with_dashes_and_values(self):
- # The 'select' argument to extra() supports names with dashes in
- # them, as long as you use values().
- a10 = Article.objects.create(
- headline="Article 10",
- pub_date=datetime(2005, 7, 31, 12, 30, 45),
- )
- a11 = Article.objects.create(
- headline='Article 11',
- pub_date=datetime(2008, 1, 1),
- )
- a12 = Article.objects.create(
- headline='Article 12',
- pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999),
- )
-
- dicts = Article.objects.filter(
- pub_date__year=2008).extra(
- select={'dashed-value': '1'}
- ).values('headline', 'dashed-value')
- self.assertEqual([sorted(d.items()) for d in dicts],
- [[('dashed-value', 1), ('headline', u'Article 11')], [('dashed-value', 1), ('headline', u'Article 12')]])
-
- def test_extra_method_select_argument_with_dashes(self):
- # If you use 'select' with extra() and names containing dashes on a
- # query that's *not* a values() query, those extra 'select' values
- # will silently be ignored.
- a10 = Article.objects.create(
- headline="Article 10",
- pub_date=datetime(2005, 7, 31, 12, 30, 45),
- )
- a11 = Article.objects.create(
- headline='Article 11',
- pub_date=datetime(2008, 1, 1),
- )
- a12 = Article.objects.create(
- headline='Article 12',
- pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999),
- )
-
- articles = Article.objects.filter(
- pub_date__year=2008).extra(
- select={'dashed-value': '1', 'undashedvalue': '2'})
- self.assertEqual(articles[0].undashedvalue, 2)
-
- def test_create_relation_with_ugettext_lazy(self):
- """
- Test that ugettext_lazy objects work when saving model instances
- through various methods. Refs #10498.
- """
- notlazy = u'test'
- lazy = ugettext_lazy(notlazy)
- reporter = Article.objects.create(headline=lazy, pub_date=datetime.now())
- article = Article.objects.get()
- self.assertEqual(article.headline, notlazy)
- # test that assign + save works with Promise objecs
- article.headline = lazy
- article.save()
- self.assertEqual(article.headline, notlazy)
- # test .update()
- Article.objects.update(headline=lazy)
- article = Article.objects.get()
- self.assertEqual(article.headline, notlazy)
- # still test bulk_create()
- Article.objects.all().delete()
- Article.objects.bulk_create([Article(headline=lazy, pub_date=datetime.now())])
- article = Article.objects.get()
- self.assertEqual(article.headline, notlazy)
diff --git a/tests/django14/bulk_create/tests.py b/tests/django14/bulk_create/tests.py
deleted file mode 100644
index f75d983a..00000000
--- a/tests/django14/bulk_create/tests.py
+++ /dev/null
@@ -1,107 +0,0 @@
-from __future__ import with_statement, absolute_import
-
-from operator import attrgetter
-
-from django.db import connection
-from django.test import TestCase, skipIfDBFeature
-from django.test.utils import override_settings
-
-from .models import Country, Restaurant, Pizzeria, State, TwoFields
-
-
-class BulkCreateTests(TestCase):
- def setUp(self):
- self.data = [
- Country(name="United States of America", iso_two_letter="US"),
- Country(name="The Netherlands", iso_two_letter="NL"),
- Country(name="Germany", iso_two_letter="DE"),
- Country(name="Czech Republic", iso_two_letter="CZ")
- ]
-
- def test_simple(self):
- created = Country.objects.bulk_create(self.data)
- self.assertEqual(len(created), 4)
- self.assertQuerysetEqual(Country.objects.order_by("-name"), [
- "United States of America", "The Netherlands", "Germany", "Czech Republic"
- ], attrgetter("name"))
-
- created = Country.objects.bulk_create([])
- self.assertEqual(created, [])
- self.assertEqual(Country.objects.count(), 4)
-
- def test_efficiency(self):
- with self.assertNumQueries(1):
- Country.objects.bulk_create(self.data)
-
- def test_inheritance(self):
- Restaurant.objects.bulk_create([
- Restaurant(name="Nicholas's")
- ])
- self.assertQuerysetEqual(Restaurant.objects.all(), [
- "Nicholas's",
- ], attrgetter("name"))
- with self.assertRaises(ValueError):
- Pizzeria.objects.bulk_create([
- Pizzeria(name="The Art of Pizza")
- ])
- self.assertQuerysetEqual(Pizzeria.objects.all(), [])
- self.assertQuerysetEqual(Restaurant.objects.all(), [
- "Nicholas's",
- ], attrgetter("name"))
-
- def test_non_auto_increment_pk(self):
- with self.assertNumQueries(1):
- State.objects.bulk_create([
- State(two_letter_code=s)
- for s in ["IL", "NY", "CA", "ME"]
- ])
- self.assertQuerysetEqual(State.objects.order_by("two_letter_code"), [
- "CA", "IL", "ME", "NY",
- ], attrgetter("two_letter_code"))
-
- def test_batch_same_vals(self):
- # Sqlite had a problem where all the same-valued models were
- # collapsed to one insert.
- Restaurant.objects.bulk_create([
- Restaurant(name='foo') for i in range(0, 2)
- ])
- self.assertEqual(Restaurant.objects.count(), 2)
-
- def test_large_batch(self):
- with override_settings(DEBUG=True):
- connection.queries = []
- TwoFields.objects.bulk_create([
- TwoFields(f1=i, f2=i+1) for i in range(0, 1001)
- ])
- self.assertTrue(len(connection.queries) < 10)
- self.assertEqual(TwoFields.objects.count(), 1001)
- self.assertEqual(
- TwoFields.objects.filter(f1__gte=450, f1__lte=550).count(),
- 101)
- self.assertEqual(TwoFields.objects.filter(f2__gte=901).count(), 101)
-
- def test_large_batch_mixed(self):
- """
- Test inserting a large batch with objects having primary key set
- mixed together with objects without PK set.
- """
- with override_settings(DEBUG=True):
- connection.queries = []
- TwoFields.objects.bulk_create([
- TwoFields(id=i if i % 2 == 0 else None, f1=i, f2=i+1)
- for i in range(100000, 101000)])
- self.assertTrue(len(connection.queries) < 10)
- self.assertEqual(TwoFields.objects.count(), 1000)
- # We can't assume much about the ID's created, except that the above
- # created IDs must exist.
- id_range = range(100000, 101000, 2)
- self.assertEqual(TwoFields.objects.filter(id__in=id_range).count(), 500)
- self.assertEqual(TwoFields.objects.exclude(id__in=id_range).count(), 500)
-
- def test_explicit_batch_size(self):
- objs = [TwoFields(f1=i, f2=i) for i in range(0, 100)]
- with self.assertNumQueries(2):
- TwoFields.objects.bulk_create(objs, 50)
- TwoFields.objects.all().delete()
- with self.assertNumQueries(1):
- TwoFields.objects.bulk_create(objs, len(objs))
diff --git a/tests/django14/cache/tests.py b/tests/django14/cache/tests.py
deleted file mode 100644
index bd29cde5..00000000
--- a/tests/django14/cache/tests.py
+++ /dev/null
@@ -1,1768 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Unit tests for cache framework
-# Uses whatever cache backend is set in the test settings file.
-from __future__ import with_statement, absolute_import
-
-import hashlib
-import os
-import re
-import StringIO
-import tempfile
-import time
-import warnings
-
-from django.conf import settings
-from django.core import management
-from django.core.cache import get_cache, DEFAULT_CACHE_ALIAS
-from django.core.cache.backends.base import (CacheKeyWarning,
- InvalidCacheBackendError)
-from django.db import router
-from django.http import HttpResponse, HttpRequest, QueryDict
-from django.middleware.cache import (FetchFromCacheMiddleware,
- UpdateCacheMiddleware, CacheMiddleware)
-from django.template import Template
-from django.template.response import TemplateResponse
-from django.test import TestCase, TransactionTestCase, RequestFactory
-from django.test.utils import (get_warnings_state, restore_warnings_state,
- override_settings)
-from django.utils import timezone, translation, unittest
-from django.utils.cache import (patch_vary_headers, get_cache_key,
- learn_cache_key, patch_cache_control, patch_response_headers)
-from django.utils.encoding import force_unicode
-from django.views.decorators.cache import cache_page
-
-from .models import Poll, expensive_calculation
-
-# functions/classes for complex data type tests
-def f():
- return 42
-
-class C:
- def m(n):
- return 24
-
-
-class DummyCacheTests(unittest.TestCase):
- # The Dummy cache backend doesn't really behave like a test backend,
- # so it has different test requirements.
- backend_name = 'django.core.cache.backends.dummy.DummyCache'
-
- def setUp(self):
- self.cache = get_cache(self.backend_name)
-
- def test_simple(self):
- "Dummy cache backend ignores cache set calls"
- self.cache.set("key", "value")
- self.assertEqual(self.cache.get("key"), None)
-
- def test_add(self):
- "Add doesn't do anything in dummy cache backend"
- self.cache.add("addkey1", "value")
- result = self.cache.add("addkey1", "newvalue")
- self.assertEqual(result, True)
- self.assertEqual(self.cache.get("addkey1"), None)
-
- def test_non_existent(self):
- "Non-existent keys aren't found in the dummy cache backend"
- self.assertEqual(self.cache.get("does_not_exist"), None)
- self.assertEqual(self.cache.get("does_not_exist", "bang!"), "bang!")
-
- def test_get_many(self):
- "get_many returns nothing for the dummy cache backend"
- self.cache.set('a', 'a')
- self.cache.set('b', 'b')
- self.cache.set('c', 'c')
- self.cache.set('d', 'd')
- self.assertEqual(self.cache.get_many(['a', 'c', 'd']), {})
- self.assertEqual(self.cache.get_many(['a', 'b', 'e']), {})
-
- def test_delete(self):
- "Cache deletion is transparently ignored on the dummy cache backend"
- self.cache.set("key1", "spam")
- self.cache.set("key2", "eggs")
- self.assertEqual(self.cache.get("key1"), None)
- self.cache.delete("key1")
- self.assertEqual(self.cache.get("key1"), None)
- self.assertEqual(self.cache.get("key2"), None)
-
- def test_has_key(self):
- "The has_key method doesn't ever return True for the dummy cache backend"
- self.cache.set("hello1", "goodbye1")
- self.assertEqual(self.cache.has_key("hello1"), False)
- self.assertEqual(self.cache.has_key("goodbye1"), False)
-
- def test_in(self):
- "The in operator doesn't ever return True for the dummy cache backend"
- self.cache.set("hello2", "goodbye2")
- self.assertEqual("hello2" in self.cache, False)
- self.assertEqual("goodbye2" in self.cache, False)
-
- def test_incr(self):
- "Dummy cache values can't be incremented"
- self.cache.set('answer', 42)
- self.assertRaises(ValueError, self.cache.incr, 'answer')
- self.assertRaises(ValueError, self.cache.incr, 'does_not_exist')
-
- def test_decr(self):
- "Dummy cache values can't be decremented"
- self.cache.set('answer', 42)
- self.assertRaises(ValueError, self.cache.decr, 'answer')
- self.assertRaises(ValueError, self.cache.decr, 'does_not_exist')
-
- def test_data_types(self):
- "All data types are ignored equally by the dummy cache"
- stuff = {
- 'string' : 'this is a string',
- 'int' : 42,
- 'list' : [1, 2, 3, 4],
- 'tuple' : (1, 2, 3, 4),
- 'dict' : {'A': 1, 'B' : 2},
- 'function' : f,
- 'class' : C,
- }
- self.cache.set("stuff", stuff)
- self.assertEqual(self.cache.get("stuff"), None)
-
- def test_expiration(self):
- "Expiration has no effect on the dummy cache"
- self.cache.set('expire1', 'very quickly', 1)
- self.cache.set('expire2', 'very quickly', 1)
- self.cache.set('expire3', 'very quickly', 1)
-
- time.sleep(2)
- self.assertEqual(self.cache.get("expire1"), None)
-
- self.cache.add("expire2", "newvalue")
- self.assertEqual(self.cache.get("expire2"), None)
- self.assertEqual(self.cache.has_key("expire3"), False)
-
- def test_unicode(self):
- "Unicode values are ignored by the dummy cache"
- stuff = {
- u'ascii': u'ascii_value',
- u'unicode_ascii': u'Iñtërnâtiônàlizætiøn1',
- u'Iñtërnâtiônàlizætiøn': u'Iñtërnâtiônàlizætiøn2',
- u'ascii2': {u'x' : 1 }
- }
- for (key, value) in stuff.items():
- self.cache.set(key, value)
- self.assertEqual(self.cache.get(key), None)
-
- def test_set_many(self):
- "set_many does nothing for the dummy cache backend"
- self.cache.set_many({'a': 1, 'b': 2})
- self.cache.set_many({'a': 1, 'b': 2}, timeout=2, version='1')
-
- def test_delete_many(self):
- "delete_many does nothing for the dummy cache backend"
- self.cache.delete_many(['a', 'b'])
-
- def test_clear(self):
- "clear does nothing for the dummy cache backend"
- self.cache.clear()
-
- def test_incr_version(self):
- "Dummy cache versions can't be incremented"
- self.cache.set('answer', 42)
- self.assertRaises(ValueError, self.cache.incr_version, 'answer')
- self.assertRaises(ValueError, self.cache.incr_version, 'does_not_exist')
-
- def test_decr_version(self):
- "Dummy cache versions can't be decremented"
- self.cache.set('answer', 42)
- self.assertRaises(ValueError, self.cache.decr_version, 'answer')
- self.assertRaises(ValueError, self.cache.decr_version, 'does_not_exist')
-
-
-class BaseCacheTests(object):
- # A common set of tests to apply to all cache backends
-
- def _get_request_cache(self, path):
- request = HttpRequest()
- request.META = {
- 'SERVER_NAME': 'testserver',
- 'SERVER_PORT': 80,
- }
- request.path = request.path_info = path
- request._cache_update_cache = True
- request.method = 'GET'
- return request
-
- def test_simple(self):
- # Simple cache set/get works
- self.cache.set("key", "value")
- self.assertEqual(self.cache.get("key"), "value")
-
- def test_add(self):
- # A key can be added to a cache
- self.cache.add("addkey1", "value")
- result = self.cache.add("addkey1", "newvalue")
- self.assertEqual(result, False)
- self.assertEqual(self.cache.get("addkey1"), "value")
-
- def test_prefix(self):
- # Test for same cache key conflicts between shared backend
- self.cache.set('somekey', 'value')
-
- # should not be set in the prefixed cache
- self.assertFalse(self.prefix_cache.has_key('somekey'))
-
- self.prefix_cache.set('somekey', 'value2')
-
- self.assertEqual(self.cache.get('somekey'), 'value')
- self.assertEqual(self.prefix_cache.get('somekey'), 'value2')
-
- def test_non_existent(self):
- # Non-existent cache keys return as None/default
- # get with non-existent keys
- self.assertEqual(self.cache.get("does_not_exist"), None)
- self.assertEqual(self.cache.get("does_not_exist", "bang!"), "bang!")
-
- def test_get_many(self):
- # Multiple cache keys can be returned using get_many
- self.cache.set('a', 'a')
- self.cache.set('b', 'b')
- self.cache.set('c', 'c')
- self.cache.set('d', 'd')
- self.assertEqual(self.cache.get_many(['a', 'c', 'd']), {'a' : 'a', 'c' : 'c', 'd' : 'd'})
- self.assertEqual(self.cache.get_many(['a', 'b', 'e']), {'a' : 'a', 'b' : 'b'})
-
- def test_delete(self):
- # Cache keys can be deleted
- self.cache.set("key1", "spam")
- self.cache.set("key2", "eggs")
- self.assertEqual(self.cache.get("key1"), "spam")
- self.cache.delete("key1")
- self.assertEqual(self.cache.get("key1"), None)
- self.assertEqual(self.cache.get("key2"), "eggs")
-
- def test_has_key(self):
- # The cache can be inspected for cache keys
- self.cache.set("hello1", "goodbye1")
- self.assertEqual(self.cache.has_key("hello1"), True)
- self.assertEqual(self.cache.has_key("goodbye1"), False)
-
- def test_in(self):
- # The in operator can be used to inspect cache contents
- self.cache.set("hello2", "goodbye2")
- self.assertEqual("hello2" in self.cache, True)
- self.assertEqual("goodbye2" in self.cache, False)
-
- def test_incr(self):
- # Cache values can be incremented
- self.cache.set('answer', 41)
- self.assertEqual(self.cache.incr('answer'), 42)
- self.assertEqual(self.cache.get('answer'), 42)
- self.assertEqual(self.cache.incr('answer', 10), 52)
- self.assertEqual(self.cache.get('answer'), 52)
- self.assertRaises(ValueError, self.cache.incr, 'does_not_exist')
-
- def test_decr(self):
- # Cache values can be decremented
- self.cache.set('answer', 43)
- self.assertEqual(self.cache.decr('answer'), 42)
- self.assertEqual(self.cache.get('answer'), 42)
- self.assertEqual(self.cache.decr('answer', 10), 32)
- self.assertEqual(self.cache.get('answer'), 32)
- self.assertRaises(ValueError, self.cache.decr, 'does_not_exist')
-
- def test_data_types(self):
- # Many different data types can be cached
- stuff = {
- 'string' : 'this is a string',
- 'int' : 42,
- 'list' : [1, 2, 3, 4],
- 'tuple' : (1, 2, 3, 4),
- 'dict' : {'A': 1, 'B' : 2},
- 'function' : f,
- 'class' : C,
- }
- self.cache.set("stuff", stuff)
- self.assertEqual(self.cache.get("stuff"), stuff)
-
- def test_cache_read_for_model_instance(self):
- # Don't want fields with callable as default to be called on cache read
- expensive_calculation.num_runs = 0
- Poll.objects.all().delete()
- my_poll = Poll.objects.create(question="Well?")
- self.assertEqual(Poll.objects.count(), 1)
- pub_date = my_poll.pub_date
- self.cache.set('question', my_poll)
- cached_poll = self.cache.get('question')
- self.assertEqual(cached_poll.pub_date, pub_date)
- # We only want the default expensive calculation run once
- self.assertEqual(expensive_calculation.num_runs, 1)
-
- def test_cache_write_for_model_instance_with_deferred(self):
- # Don't want fields with callable as default to be called on cache write
- expensive_calculation.num_runs = 0
- Poll.objects.all().delete()
- my_poll = Poll.objects.create(question="What?")
- self.assertEqual(expensive_calculation.num_runs, 1)
- defer_qs = Poll.objects.all().defer('question')
- self.assertEqual(defer_qs.count(), 1)
- self.assertEqual(expensive_calculation.num_runs, 1)
- self.cache.set('deferred_queryset', defer_qs)
- # cache set should not re-evaluate default functions
- self.assertEqual(expensive_calculation.num_runs, 1)
-
- def test_cache_read_for_model_instance_with_deferred(self):
- # Don't want fields with callable as default to be called on cache read
- expensive_calculation.num_runs = 0
- Poll.objects.all().delete()
- my_poll = Poll.objects.create(question="What?")
- self.assertEqual(expensive_calculation.num_runs, 1)
- defer_qs = Poll.objects.all().defer('question')
- self.assertEqual(defer_qs.count(), 1)
- self.cache.set('deferred_queryset', defer_qs)
- self.assertEqual(expensive_calculation.num_runs, 1)
- runs_before_cache_read = expensive_calculation.num_runs
- cached_polls = self.cache.get('deferred_queryset')
- # We only want the default expensive calculation run on creation and set
- self.assertEqual(expensive_calculation.num_runs, runs_before_cache_read)
-
- def test_expiration(self):
- # Cache values can be set to expire
- self.cache.set('expire1', 'very quickly', 1)
- self.cache.set('expire2', 'very quickly', 1)
- self.cache.set('expire3', 'very quickly', 1)
-
- time.sleep(2)
- self.assertEqual(self.cache.get("expire1"), None)
-
- self.cache.add("expire2", "newvalue")
- self.assertEqual(self.cache.get("expire2"), "newvalue")
- self.assertEqual(self.cache.has_key("expire3"), False)
-
- def test_unicode(self):
- # Unicode values can be cached
- stuff = {
- u'ascii': u'ascii_value',
- u'unicode_ascii': u'Iñtërnâtiônàlizætiøn1',
- u'Iñtërnâtiônàlizætiøn': u'Iñtërnâtiônàlizætiøn2',
- u'ascii2': {u'x' : 1 }
- }
- # Test `set`
- for (key, value) in stuff.items():
- self.cache.set(key, value)
- self.assertEqual(self.cache.get(key), value)
-
- # Test `add`
- for (key, value) in stuff.items():
- self.cache.delete(key)
- self.cache.add(key, value)
- self.assertEqual(self.cache.get(key), value)
-
- # Test `set_many`
- for (key, value) in stuff.items():
- self.cache.delete(key)
- self.cache.set_many(stuff)
- for (key, value) in stuff.items():
- self.assertEqual(self.cache.get(key), value)
-
- def test_binary_string(self):
- # Binary strings should be cacheable
- from zlib import compress, decompress
- value = 'value_to_be_compressed'
- compressed_value = compress(value)
-
- # Test set
- self.cache.set('binary1', compressed_value)
- compressed_result = self.cache.get('binary1')
- self.assertEqual(compressed_value, compressed_result)
- self.assertEqual(value, decompress(compressed_result))
-
- # Test add
- self.cache.add('binary1-add', compressed_value)
- compressed_result = self.cache.get('binary1-add')
- self.assertEqual(compressed_value, compressed_result)
- self.assertEqual(value, decompress(compressed_result))
-
- # Test set_many
- self.cache.set_many({'binary1-set_many': compressed_value})
- compressed_result = self.cache.get('binary1-set_many')
- self.assertEqual(compressed_value, compressed_result)
- self.assertEqual(value, decompress(compressed_result))
-
- def test_set_many(self):
- # Multiple keys can be set using set_many
- self.cache.set_many({"key1": "spam", "key2": "eggs"})
- self.assertEqual(self.cache.get("key1"), "spam")
- self.assertEqual(self.cache.get("key2"), "eggs")
-
- def test_set_many_expiration(self):
- # set_many takes a second ``timeout`` parameter
- self.cache.set_many({"key1": "spam", "key2": "eggs"}, 1)
- time.sleep(2)
- self.assertEqual(self.cache.get("key1"), None)
- self.assertEqual(self.cache.get("key2"), None)
-
- def test_delete_many(self):
- # Multiple keys can be deleted using delete_many
- self.cache.set("key1", "spam")
- self.cache.set("key2", "eggs")
- self.cache.set("key3", "ham")
- self.cache.delete_many(["key1", "key2"])
- self.assertEqual(self.cache.get("key1"), None)
- self.assertEqual(self.cache.get("key2"), None)
- self.assertEqual(self.cache.get("key3"), "ham")
-
- def test_clear(self):
- # The cache can be emptied using clear
- self.cache.set("key1", "spam")
- self.cache.set("key2", "eggs")
- self.cache.clear()
- self.assertEqual(self.cache.get("key1"), None)
- self.assertEqual(self.cache.get("key2"), None)
-
- def test_long_timeout(self):
- '''
- Using a timeout greater than 30 days makes memcached think
- it is an absolute expiration timestamp instead of a relative
- offset. Test that we honour this convention. Refs #12399.
- '''
- self.cache.set('key1', 'eggs', 60*60*24*30 + 1) #30 days + 1 second
- self.assertEqual(self.cache.get('key1'), 'eggs')
-
- self.cache.add('key2', 'ham', 60*60*24*30 + 1)
- self.assertEqual(self.cache.get('key2'), 'ham')
-
- self.cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 60*60*24*30 + 1)
- self.assertEqual(self.cache.get('key3'), 'sausage')
- self.assertEqual(self.cache.get('key4'), 'lobster bisque')
-
- def test_float_timeout(self):
- # Make sure a timeout given as a float doesn't crash anything.
- self.cache.set("key1", "spam", 100.2)
- self.assertEqual(self.cache.get("key1"), "spam")
-
- def perform_cull_test(self, initial_count, final_count):
- """This is implemented as a utility method, because only some of the backends
- implement culling. The culling algorithm also varies slightly, so the final
- number of entries will vary between backends"""
- # Create initial cache key entries. This will overflow the cache, causing a cull
- for i in range(1, initial_count):
- self.cache.set('cull%d' % i, 'value', 1000)
- count = 0
- # Count how many keys are left in the cache.
- for i in range(1, initial_count):
- if self.cache.has_key('cull%d' % i):
- count = count + 1
- self.assertEqual(count, final_count)
-
- def test_invalid_keys(self):
- """
- All the builtin backends (except memcached, see below) should warn on
- keys that would be refused by memcached. This encourages portable
- caching code without making it too difficult to use production backends
- with more liberal key rules. Refs #6447.
-
- """
- # mimic custom ``make_key`` method being defined since the default will
- # never show the below warnings
- def func(key, *args):
- return key
-
- old_func = self.cache.key_func
- self.cache.key_func = func
- # On Python 2.6+ we could use the catch_warnings context
- # manager to test this warning nicely. Since we can't do that
- # yet, the cleanest option is to temporarily ask for
- # CacheKeyWarning to be raised as an exception.
- _warnings_state = get_warnings_state()
- warnings.simplefilter("error", CacheKeyWarning)
-
- try:
- # memcached does not allow whitespace or control characters in keys
- self.assertRaises(CacheKeyWarning, self.cache.set, 'key with spaces', 'value')
- # memcached limits key length to 250
- self.assertRaises(CacheKeyWarning, self.cache.set, 'a' * 251, 'value')
- finally:
- restore_warnings_state(_warnings_state)
- self.cache.key_func = old_func
-
- def test_cache_versioning_get_set(self):
- # set, using default version = 1
- self.cache.set('answer1', 42)
- self.assertEqual(self.cache.get('answer1'), 42)
- self.assertEqual(self.cache.get('answer1', version=1), 42)
- self.assertEqual(self.cache.get('answer1', version=2), None)
-
- self.assertEqual(self.v2_cache.get('answer1'), None)
- self.assertEqual(self.v2_cache.get('answer1', version=1), 42)
- self.assertEqual(self.v2_cache.get('answer1', version=2), None)
-
- # set, default version = 1, but manually override version = 2
- self.cache.set('answer2', 42, version=2)
- self.assertEqual(self.cache.get('answer2'), None)
- self.assertEqual(self.cache.get('answer2', version=1), None)
- self.assertEqual(self.cache.get('answer2', version=2), 42)
-
- self.assertEqual(self.v2_cache.get('answer2'), 42)
- self.assertEqual(self.v2_cache.get('answer2', version=1), None)
- self.assertEqual(self.v2_cache.get('answer2', version=2), 42)
-
- # v2 set, using default version = 2
- self.v2_cache.set('answer3', 42)
- self.assertEqual(self.cache.get('answer3'), None)
- self.assertEqual(self.cache.get('answer3', version=1), None)
- self.assertEqual(self.cache.get('answer3', version=2), 42)
-
- self.assertEqual(self.v2_cache.get('answer3'), 42)
- self.assertEqual(self.v2_cache.get('answer3', version=1), None)
- self.assertEqual(self.v2_cache.get('answer3', version=2), 42)
-
- # v2 set, default version = 2, but manually override version = 1
- self.v2_cache.set('answer4', 42, version=1)
- self.assertEqual(self.cache.get('answer4'), 42)
- self.assertEqual(self.cache.get('answer4', version=1), 42)
- self.assertEqual(self.cache.get('answer4', version=2), None)
-
- self.assertEqual(self.v2_cache.get('answer4'), None)
- self.assertEqual(self.v2_cache.get('answer4', version=1), 42)
- self.assertEqual(self.v2_cache.get('answer4', version=2), None)
-
- def test_cache_versioning_add(self):
-
- # add, default version = 1, but manually override version = 2
- self.cache.add('answer1', 42, version=2)
- self.assertEqual(self.cache.get('answer1', version=1), None)
- self.assertEqual(self.cache.get('answer1', version=2), 42)
-
- self.cache.add('answer1', 37, version=2)
- self.assertEqual(self.cache.get('answer1', version=1), None)
- self.assertEqual(self.cache.get('answer1', version=2), 42)
-
- self.cache.add('answer1', 37, version=1)
- self.assertEqual(self.cache.get('answer1', version=1), 37)
- self.assertEqual(self.cache.get('answer1', version=2), 42)
-
- # v2 add, using default version = 2
- self.v2_cache.add('answer2', 42)
- self.assertEqual(self.cache.get('answer2', version=1), None)
- self.assertEqual(self.cache.get('answer2', version=2), 42)
-
- self.v2_cache.add('answer2', 37)
- self.assertEqual(self.cache.get('answer2', version=1), None)
- self.assertEqual(self.cache.get('answer2', version=2), 42)
-
- self.v2_cache.add('answer2', 37, version=1)
- self.assertEqual(self.cache.get('answer2', version=1), 37)
- self.assertEqual(self.cache.get('answer2', version=2), 42)
-
- # v2 add, default version = 2, but manually override version = 1
- self.v2_cache.add('answer3', 42, version=1)
- self.assertEqual(self.cache.get('answer3', version=1), 42)
- self.assertEqual(self.cache.get('answer3', version=2), None)
-
- self.v2_cache.add('answer3', 37, version=1)
- self.assertEqual(self.cache.get('answer3', version=1), 42)
- self.assertEqual(self.cache.get('answer3', version=2), None)
-
- self.v2_cache.add('answer3', 37)
- self.assertEqual(self.cache.get('answer3', version=1), 42)
- self.assertEqual(self.cache.get('answer3', version=2), 37)
-
- def test_cache_versioning_has_key(self):
- self.cache.set('answer1', 42)
-
- # has_key
- self.assertTrue(self.cache.has_key('answer1'))
- self.assertTrue(self.cache.has_key('answer1', version=1))
- self.assertFalse(self.cache.has_key('answer1', version=2))
-
- self.assertFalse(self.v2_cache.has_key('answer1'))
- self.assertTrue(self.v2_cache.has_key('answer1', version=1))
- self.assertFalse(self.v2_cache.has_key('answer1', version=2))
-
- def test_cache_versioning_delete(self):
- self.cache.set('answer1', 37, version=1)
- self.cache.set('answer1', 42, version=2)
- self.cache.delete('answer1')
- self.assertEqual(self.cache.get('answer1', version=1), None)
- self.assertEqual(self.cache.get('answer1', version=2), 42)
-
- self.cache.set('answer2', 37, version=1)
- self.cache.set('answer2', 42, version=2)
- self.cache.delete('answer2', version=2)
- self.assertEqual(self.cache.get('answer2', version=1), 37)
- self.assertEqual(self.cache.get('answer2', version=2), None)
-
- self.cache.set('answer3', 37, version=1)
- self.cache.set('answer3', 42, version=2)
- self.v2_cache.delete('answer3')
- self.assertEqual(self.cache.get('answer3', version=1), 37)
- self.assertEqual(self.cache.get('answer3', version=2), None)
-
- self.cache.set('answer4', 37, version=1)
- self.cache.set('answer4', 42, version=2)
- self.v2_cache.delete('answer4', version=1)
- self.assertEqual(self.cache.get('answer4', version=1), None)
- self.assertEqual(self.cache.get('answer4', version=2), 42)
-
- def test_cache_versioning_incr_decr(self):
- self.cache.set('answer1', 37, version=1)
- self.cache.set('answer1', 42, version=2)
- self.cache.incr('answer1')
- self.assertEqual(self.cache.get('answer1', version=1), 38)
- self.assertEqual(self.cache.get('answer1', version=2), 42)
- self.cache.decr('answer1')
- self.assertEqual(self.cache.get('answer1', version=1), 37)
- self.assertEqual(self.cache.get('answer1', version=2), 42)
-
- self.cache.set('answer2', 37, version=1)
- self.cache.set('answer2', 42, version=2)
- self.cache.incr('answer2', version=2)
- self.assertEqual(self.cache.get('answer2', version=1), 37)
- self.assertEqual(self.cache.get('answer2', version=2), 43)
- self.cache.decr('answer2', version=2)
- self.assertEqual(self.cache.get('answer2', version=1), 37)
- self.assertEqual(self.cache.get('answer2', version=2), 42)
-
- self.cache.set('answer3', 37, version=1)
- self.cache.set('answer3', 42, version=2)
- self.v2_cache.incr('answer3')
- self.assertEqual(self.cache.get('answer3', version=1), 37)
- self.assertEqual(self.cache.get('answer3', version=2), 43)
- self.v2_cache.decr('answer3')
- self.assertEqual(self.cache.get('answer3', version=1), 37)
- self.assertEqual(self.cache.get('answer3', version=2), 42)
-
- self.cache.set('answer4', 37, version=1)
- self.cache.set('answer4', 42, version=2)
- self.v2_cache.incr('answer4', version=1)
- self.assertEqual(self.cache.get('answer4', version=1), 38)
- self.assertEqual(self.cache.get('answer4', version=2), 42)
- self.v2_cache.decr('answer4', version=1)
- self.assertEqual(self.cache.get('answer4', version=1), 37)
- self.assertEqual(self.cache.get('answer4', version=2), 42)
-
- def test_cache_versioning_get_set_many(self):
- # set, using default version = 1
- self.cache.set_many({'ford1': 37, 'arthur1': 42})
- self.assertEqual(self.cache.get_many(['ford1','arthur1']),
- {'ford1': 37, 'arthur1': 42})
- self.assertEqual(self.cache.get_many(['ford1','arthur1'], version=1),
- {'ford1': 37, 'arthur1': 42})
- self.assertEqual(self.cache.get_many(['ford1','arthur1'], version=2), {})
-
- self.assertEqual(self.v2_cache.get_many(['ford1','arthur1']), {})
- self.assertEqual(self.v2_cache.get_many(['ford1','arthur1'], version=1),
- {'ford1': 37, 'arthur1': 42})
- self.assertEqual(self.v2_cache.get_many(['ford1','arthur1'], version=2), {})
-
- # set, default version = 1, but manually override version = 2
- self.cache.set_many({'ford2': 37, 'arthur2': 42}, version=2)
- self.assertEqual(self.cache.get_many(['ford2','arthur2']), {})
- self.assertEqual(self.cache.get_many(['ford2','arthur2'], version=1), {})
- self.assertEqual(self.cache.get_many(['ford2','arthur2'], version=2),
- {'ford2': 37, 'arthur2': 42})
-
- self.assertEqual(self.v2_cache.get_many(['ford2','arthur2']),
- {'ford2': 37, 'arthur2': 42})
- self.assertEqual(self.v2_cache.get_many(['ford2','arthur2'], version=1), {})
- self.assertEqual(self.v2_cache.get_many(['ford2','arthur2'], version=2),
- {'ford2': 37, 'arthur2': 42})
-
- # v2 set, using default version = 2
- self.v2_cache.set_many({'ford3': 37, 'arthur3': 42})
- self.assertEqual(self.cache.get_many(['ford3','arthur3']), {})
- self.assertEqual(self.cache.get_many(['ford3','arthur3'], version=1), {})
- self.assertEqual(self.cache.get_many(['ford3','arthur3'], version=2),
- {'ford3': 37, 'arthur3': 42})
-
- self.assertEqual(self.v2_cache.get_many(['ford3','arthur3']),
- {'ford3': 37, 'arthur3': 42})
- self.assertEqual(self.v2_cache.get_many(['ford3','arthur3'], version=1), {})
- self.assertEqual(self.v2_cache.get_many(['ford3','arthur3'], version=2),
- {'ford3': 37, 'arthur3': 42})
-
- # v2 set, default version = 2, but manually override version = 1
- self.v2_cache.set_many({'ford4': 37, 'arthur4': 42}, version=1)
- self.assertEqual(self.cache.get_many(['ford4','arthur4']),
- {'ford4': 37, 'arthur4': 42})
- self.assertEqual(self.cache.get_many(['ford4','arthur4'], version=1),
- {'ford4': 37, 'arthur4': 42})
- self.assertEqual(self.cache.get_many(['ford4','arthur4'], version=2), {})
-
- self.assertEqual(self.v2_cache.get_many(['ford4','arthur4']), {})
- self.assertEqual(self.v2_cache.get_many(['ford4','arthur4'], version=1),
- {'ford4': 37, 'arthur4': 42})
- self.assertEqual(self.v2_cache.get_many(['ford4','arthur4'], version=2), {})
-
- def test_incr_version(self):
- self.cache.set('answer', 42, version=2)
- self.assertEqual(self.cache.get('answer'), None)
- self.assertEqual(self.cache.get('answer', version=1), None)
- self.assertEqual(self.cache.get('answer', version=2), 42)
- self.assertEqual(self.cache.get('answer', version=3), None)
-
- self.assertEqual(self.cache.incr_version('answer', version=2), 3)
- self.assertEqual(self.cache.get('answer'), None)
- self.assertEqual(self.cache.get('answer', version=1), None)
- self.assertEqual(self.cache.get('answer', version=2), None)
- self.assertEqual(self.cache.get('answer', version=3), 42)
-
- self.v2_cache.set('answer2', 42)
- self.assertEqual(self.v2_cache.get('answer2'), 42)
- self.assertEqual(self.v2_cache.get('answer2', version=1), None)
- self.assertEqual(self.v2_cache.get('answer2', version=2), 42)
- self.assertEqual(self.v2_cache.get('answer2', version=3), None)
-
- self.assertEqual(self.v2_cache.incr_version('answer2'), 3)
- self.assertEqual(self.v2_cache.get('answer2'), None)
- self.assertEqual(self.v2_cache.get('answer2', version=1), None)
- self.assertEqual(self.v2_cache.get('answer2', version=2), None)
- self.assertEqual(self.v2_cache.get('answer2', version=3), 42)
-
- self.assertRaises(ValueError, self.cache.incr_version, 'does_not_exist')
-
- def test_decr_version(self):
- self.cache.set('answer', 42, version=2)
- self.assertEqual(self.cache.get('answer'), None)
- self.assertEqual(self.cache.get('answer', version=1), None)
- self.assertEqual(self.cache.get('answer', version=2), 42)
-
- self.assertEqual(self.cache.decr_version('answer', version=2), 1)
- self.assertEqual(self.cache.get('answer'), 42)
- self.assertEqual(self.cache.get('answer', version=1), 42)
- self.assertEqual(self.cache.get('answer', version=2), None)
-
- self.v2_cache.set('answer2', 42)
- self.assertEqual(self.v2_cache.get('answer2'), 42)
- self.assertEqual(self.v2_cache.get('answer2', version=1), None)
- self.assertEqual(self.v2_cache.get('answer2', version=2), 42)
-
- self.assertEqual(self.v2_cache.decr_version('answer2'), 1)
- self.assertEqual(self.v2_cache.get('answer2'), None)
- self.assertEqual(self.v2_cache.get('answer2', version=1), 42)
- self.assertEqual(self.v2_cache.get('answer2', version=2), None)
-
- self.assertRaises(ValueError, self.cache.decr_version, 'does_not_exist', version=2)
-
- def test_custom_key_func(self):
- # Two caches with different key functions aren't visible to each other
- self.cache.set('answer1', 42)
- self.assertEqual(self.cache.get('answer1'), 42)
- self.assertEqual(self.custom_key_cache.get('answer1'), None)
- self.assertEqual(self.custom_key_cache2.get('answer1'), None)
-
- self.custom_key_cache.set('answer2', 42)
- self.assertEqual(self.cache.get('answer2'), None)
- self.assertEqual(self.custom_key_cache.get('answer2'), 42)
- self.assertEqual(self.custom_key_cache2.get('answer2'), 42)
-
-
- def test_cache_write_unpickable_object(self):
- update_middleware = UpdateCacheMiddleware()
- update_middleware.cache = self.cache
-
- fetch_middleware = FetchFromCacheMiddleware()
- fetch_middleware.cache = self.cache
-
- request = self._get_request_cache('/cache/test')
- get_cache_data = FetchFromCacheMiddleware().process_request(request)
- self.assertEqual(get_cache_data, None)
-
- response = HttpResponse()
- content = 'Testing cookie serialization.'
- response.content = content
- response.set_cookie('foo', 'bar')
-
- update_middleware.process_response(request, response)
-
- get_cache_data = fetch_middleware.process_request(request)
- self.assertNotEqual(get_cache_data, None)
- self.assertEqual(get_cache_data.content, content)
- self.assertEqual(get_cache_data.cookies, response.cookies)
-
- update_middleware.process_response(request, get_cache_data)
- get_cache_data = fetch_middleware.process_request(request)
- self.assertNotEqual(get_cache_data, None)
- self.assertEqual(get_cache_data.content, content)
- self.assertEqual(get_cache_data.cookies, response.cookies)
-
-def custom_key_func(key, key_prefix, version):
- "A customized cache key function"
- return 'CUSTOM-' + '-'.join([key_prefix, str(version), key])
-
-
-class DBCacheTests(BaseCacheTests, TransactionTestCase):
- backend_name = 'django.core.cache.backends.db.DatabaseCache'
-
- def setUp(self):
- # Spaces are used in the table name to ensure quoting/escaping is working
- self._table_name = 'test cache table'
- management.call_command('createcachetable', self._table_name, verbosity=0, interactive=False)
- self.cache = get_cache(self.backend_name, LOCATION=self._table_name, OPTIONS={'MAX_ENTRIES': 30})
- self.prefix_cache = get_cache(self.backend_name, LOCATION=self._table_name, KEY_PREFIX='cacheprefix')
- self.v2_cache = get_cache(self.backend_name, LOCATION=self._table_name, VERSION=2)
- self.custom_key_cache = get_cache(self.backend_name, LOCATION=self._table_name, KEY_FUNCTION=custom_key_func)
- self.custom_key_cache2 = get_cache(self.backend_name, LOCATION=self._table_name, KEY_FUNCTION='regressiontests.cache.tests.custom_key_func')
-
- def tearDown(self):
- from django.db import connection
- cursor = connection.cursor()
- cursor.execute('DROP TABLE %s' % connection.ops.quote_name(self._table_name))
- connection.commit()
-
- def test_cull(self):
- self.perform_cull_test(50, 29)
-
- def test_zero_cull(self):
- self.cache = get_cache(self.backend_name, LOCATION=self._table_name, OPTIONS={'MAX_ENTRIES': 30, 'CULL_FREQUENCY': 0})
- self.perform_cull_test(50, 18)
-
- def test_old_initialization(self):
- self.cache = get_cache('db://%s?max_entries=30&cull_frequency=0' % self._table_name)
- self.perform_cull_test(50, 18)
-
- def test_second_call_doesnt_crash(self):
- err = StringIO.StringIO()
- management.call_command('createcachetable', self._table_name, verbosity=0, interactive=False, stderr=err)
- self.assertTrue("Cache table 'test cache table' could not be created" in err.getvalue())
-
-
-DBCacheWithTimeZoneTests = override_settings(USE_TZ=True)(DBCacheTests)
-
-
-class DBCacheRouter(object):
- """A router that puts the cache table on the 'other' database."""
-
- def db_for_read(self, model, **hints):
- if model._meta.app_label == 'django_cache':
- return 'other'
-
- def db_for_write(self, model, **hints):
- if model._meta.app_label == 'django_cache':
- return 'other'
-
- def allow_syncdb(self, db, model):
- if model._meta.app_label == 'django_cache':
- return db == 'other'
-
-
-class CreateCacheTableForDBCacheTests(TestCase):
- multi_db = True
-
- def test_createcachetable_observes_database_router(self):
- old_routers = router.routers
- try:
- router.routers = [DBCacheRouter()]
- # cache table should not be created on 'default'
- with self.assertNumQueries(0, using='default'):
- management.call_command('createcachetable', 'cache_table',
- database='default',
- verbosity=0, interactive=False)
- # cache table should be created on 'other'
- # one query is used to create the table and another one the index
- with self.assertNumQueries(2, using='other'):
- management.call_command('createcachetable', 'cache_table',
- database='other',
- verbosity=0, interactive=False)
- finally:
- router.routers = old_routers
-
-
-class LocMemCacheTests(unittest.TestCase, BaseCacheTests):
- backend_name = 'django.core.cache.backends.locmem.LocMemCache'
-
- def setUp(self):
- self.cache = get_cache(self.backend_name, OPTIONS={'MAX_ENTRIES': 30})
- self.prefix_cache = get_cache(self.backend_name, KEY_PREFIX='cacheprefix')
- self.v2_cache = get_cache(self.backend_name, VERSION=2)
- self.custom_key_cache = get_cache(self.backend_name, OPTIONS={'MAX_ENTRIES': 30}, KEY_FUNCTION=custom_key_func)
- self.custom_key_cache2 = get_cache(self.backend_name, OPTIONS={'MAX_ENTRIES': 30}, KEY_FUNCTION='regressiontests.cache.tests.custom_key_func')
-
- # LocMem requires a hack to make the other caches
- # share a data store with the 'normal' cache.
- self.prefix_cache._cache = self.cache._cache
- self.prefix_cache._expire_info = self.cache._expire_info
-
- self.v2_cache._cache = self.cache._cache
- self.v2_cache._expire_info = self.cache._expire_info
-
- self.custom_key_cache._cache = self.cache._cache
- self.custom_key_cache._expire_info = self.cache._expire_info
-
- self.custom_key_cache2._cache = self.cache._cache
- self.custom_key_cache2._expire_info = self.cache._expire_info
-
- def tearDown(self):
- self.cache.clear()
-
- def test_cull(self):
- self.perform_cull_test(50, 29)
-
- def test_zero_cull(self):
- self.cache = get_cache(self.backend_name, OPTIONS={'MAX_ENTRIES': 30, 'CULL_FREQUENCY': 0})
- self.perform_cull_test(50, 19)
-
- def test_old_initialization(self):
- self.cache = get_cache('locmem://?max_entries=30&cull_frequency=0')
- self.perform_cull_test(50, 19)
-
- def test_multiple_caches(self):
- "Check that multiple locmem caches are isolated"
- mirror_cache = get_cache(self.backend_name)
- other_cache = get_cache(self.backend_name, LOCATION='other')
-
- self.cache.set('value1', 42)
- self.assertEqual(mirror_cache.get('value1'), 42)
- self.assertEqual(other_cache.get('value1'), None)
-
- def test_incr_decr_timeout(self):
- """incr/decr does not modify expiry time (matches memcached behavior)"""
- key = 'value'
- _key = self.cache.make_key(key)
- self.cache.set(key, 1, timeout=self.cache.default_timeout*10)
- expire = self.cache._expire_info[_key]
- self.cache.incr(key)
- self.assertEqual(expire, self.cache._expire_info[_key])
- self.cache.decr(key)
- self.assertEqual(expire, self.cache._expire_info[_key])
-
-# memcached backend isn't guaranteed to be available.
-# To check the memcached backend, the test settings file will
-# need to contain a cache backend setting that points at
-# your memcache server.
-class MemcachedCacheTests(unittest.TestCase, BaseCacheTests):
- backend_name = 'django.core.cache.backends.memcached.MemcachedCache'
-
- def setUp(self):
- name = settings.CACHES[DEFAULT_CACHE_ALIAS]['LOCATION']
- self.cache = get_cache(self.backend_name, LOCATION=name)
- self.prefix_cache = get_cache(self.backend_name, LOCATION=name, KEY_PREFIX='cacheprefix')
- self.v2_cache = get_cache(self.backend_name, LOCATION=name, VERSION=2)
- self.custom_key_cache = get_cache(self.backend_name, LOCATION=name, KEY_FUNCTION=custom_key_func)
- self.custom_key_cache2 = get_cache(self.backend_name, LOCATION=name, KEY_FUNCTION='regressiontests.cache.tests.custom_key_func')
-
- def tearDown(self):
- self.cache.clear()
-
- def test_invalid_keys(self):
- """
- On memcached, we don't introduce a duplicate key validation
- step (for speed reasons), we just let the memcached API
- library raise its own exception on bad keys. Refs #6447.
-
- In order to be memcached-API-library agnostic, we only assert
- that a generic exception of some kind is raised.
-
- """
- # memcached does not allow whitespace or control characters in keys
- self.assertRaises(Exception, self.cache.set, 'key with spaces', 'value')
- # memcached limits key length to 250
- self.assertRaises(Exception, self.cache.set, 'a' * 251, 'value')
-
-MemcachedCacheTests = unittest.skipUnless(settings.CACHES[DEFAULT_CACHE_ALIAS]['BACKEND'].startswith('django.core.cache.backends.memcached.'), "memcached not available")(MemcachedCacheTests)
-
-
-class FileBasedCacheTests(unittest.TestCase, BaseCacheTests):
- """
- Specific test cases for the file-based cache.
- """
- backend_name = 'django.core.cache.backends.filebased.FileBasedCache'
-
- def setUp(self):
- self.dirname = tempfile.mkdtemp()
- self.cache = get_cache(self.backend_name, LOCATION=self.dirname, OPTIONS={'MAX_ENTRIES': 30})
- self.prefix_cache = get_cache(self.backend_name, LOCATION=self.dirname, KEY_PREFIX='cacheprefix')
- self.v2_cache = get_cache(self.backend_name, LOCATION=self.dirname, VERSION=2)
- self.custom_key_cache = get_cache(self.backend_name, LOCATION=self.dirname, KEY_FUNCTION=custom_key_func)
- self.custom_key_cache2 = get_cache(self.backend_name, LOCATION=self.dirname, KEY_FUNCTION='regressiontests.cache.tests.custom_key_func')
-
- def tearDown(self):
- self.cache.clear()
-
- def test_hashing(self):
- """Test that keys are hashed into subdirectories correctly"""
- self.cache.set("foo", "bar")
- key = self.cache.make_key("foo")
- keyhash = hashlib.md5(key).hexdigest()
- keypath = os.path.join(self.dirname, keyhash[:2], keyhash[2:4], keyhash[4:])
- self.assertTrue(os.path.exists(keypath))
-
- def test_subdirectory_removal(self):
- """
- Make sure that the created subdirectories are correctly removed when empty.
- """
- self.cache.set("foo", "bar")
- key = self.cache.make_key("foo")
- keyhash = hashlib.md5(key).hexdigest()
- keypath = os.path.join(self.dirname, keyhash[:2], keyhash[2:4], keyhash[4:])
- self.assertTrue(os.path.exists(keypath))
-
- self.cache.delete("foo")
- self.assertTrue(not os.path.exists(keypath))
- self.assertTrue(not os.path.exists(os.path.dirname(keypath)))
- self.assertTrue(not os.path.exists(os.path.dirname(os.path.dirname(keypath))))
-
- def test_cull(self):
- self.perform_cull_test(50, 29)
-
- def test_old_initialization(self):
- self.cache = get_cache('file://%s?max_entries=30' % self.dirname)
- self.perform_cull_test(50, 29)
-
-
-class CustomCacheKeyValidationTests(unittest.TestCase):
- """
- Tests for the ability to mixin a custom ``validate_key`` method to
- a custom cache backend that otherwise inherits from a builtin
- backend, and override the default key validation. Refs #6447.
-
- """
- def test_custom_key_validation(self):
- cache = get_cache('regressiontests.cache.liberal_backend://')
-
- # this key is both longer than 250 characters, and has spaces
- key = 'some key with spaces' * 15
- val = 'a value'
- cache.set(key, val)
- self.assertEqual(cache.get(key), val)
-
-
-class GetCacheTests(unittest.TestCase):
-
- def test_simple(self):
- cache = get_cache('locmem://')
- from django.core.cache.backends.locmem import LocMemCache
- self.assertTrue(isinstance(cache, LocMemCache))
-
- from django.core.cache import cache
- self.assertTrue(isinstance(cache, get_cache('default').__class__))
-
- cache = get_cache(
- 'django.core.cache.backends.dummy.DummyCache', **{'TIMEOUT': 120})
- self.assertEqual(cache.default_timeout, 120)
-
- self.assertRaises(InvalidCacheBackendError, get_cache, 'does_not_exist')
-
- def test_close(self):
- from django.core import signals
- cache = get_cache('regressiontests.cache.closeable_cache.CacheClass')
- self.assertFalse(cache.closed)
- signals.request_finished.send(self.__class__)
- self.assertTrue(cache.closed)
-
-
-class CacheUtils(TestCase):
- """TestCase for django.utils.cache functions."""
-
- def setUp(self):
- self.path = '/cache/test/'
- self.cache = get_cache('default')
-
- def tearDown(self):
- self.cache.clear()
-
- def _get_request(self, path, method='GET'):
- request = HttpRequest()
- request.META = {
- 'SERVER_NAME': 'testserver',
- 'SERVER_PORT': 80,
- }
- request.method = method
- request.path = request.path_info = "/cache/%s" % path
- return request
-
- def test_patch_vary_headers(self):
- headers = (
- # Initial vary, new headers, resulting vary.
- (None, ('Accept-Encoding',), 'Accept-Encoding'),
- ('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'),
- ('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'),
- ('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
- ('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
- ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
- (None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'),
- ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
- ('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
- )
- for initial_vary, newheaders, resulting_vary in headers:
- response = HttpResponse()
- if initial_vary is not None:
- response['Vary'] = initial_vary
- patch_vary_headers(response, newheaders)
- self.assertEqual(response['Vary'], resulting_vary)
-
- def test_get_cache_key(self):
- request = self._get_request(self.path)
- response = HttpResponse()
- key_prefix = 'localprefix'
- # Expect None if no headers have been set yet.
- self.assertEqual(get_cache_key(request), None)
- # Set headers to an empty list.
- learn_cache_key(request, response)
- self.assertEqual(get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.a8c87a3d8c44853d7f79474f7ffe4ad5.d41d8cd98f00b204e9800998ecf8427e')
- # Verify that a specified key_prefix is taken into account.
- learn_cache_key(request, response, key_prefix=key_prefix)
- self.assertEqual(get_cache_key(request, key_prefix=key_prefix), 'views.decorators.cache.cache_page.localprefix.GET.a8c87a3d8c44853d7f79474f7ffe4ad5.d41d8cd98f00b204e9800998ecf8427e')
-
- def test_get_cache_key_with_query(self):
- request = self._get_request(self.path + '?test=1')
- response = HttpResponse()
- # Expect None if no headers have been set yet.
- self.assertEqual(get_cache_key(request), None)
- # Set headers to an empty list.
- learn_cache_key(request, response)
- # Verify that the querystring is taken into account.
- self.assertEqual(get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.bd889c5a59603af44333ed21504db3cd.d41d8cd98f00b204e9800998ecf8427e')
-
- def test_learn_cache_key(self):
- request = self._get_request(self.path, 'HEAD')
- response = HttpResponse()
- response['Vary'] = 'Pony'
- # Make sure that the Vary header is added to the key hash
- learn_cache_key(request, response)
- self.assertEqual(get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.a8c87a3d8c44853d7f79474f7ffe4ad5.d41d8cd98f00b204e9800998ecf8427e')
-
- def test_patch_cache_control(self):
- tests = (
- # Initial Cache-Control, kwargs to patch_cache_control, expected Cache-Control parts
- (None, {'private' : True}, set(['private'])),
-
- # Test whether private/public attributes are mutually exclusive
- ('private', {'private' : True}, set(['private'])),
- ('private', {'public' : True}, set(['public'])),
- ('public', {'public' : True}, set(['public'])),
- ('public', {'private' : True}, set(['private'])),
- ('must-revalidate,max-age=60,private', {'public' : True}, set(['must-revalidate', 'max-age=60', 'public'])),
- ('must-revalidate,max-age=60,public', {'private' : True}, set(['must-revalidate', 'max-age=60', 'private'])),
- ('must-revalidate,max-age=60', {'public' : True}, set(['must-revalidate', 'max-age=60', 'public'])),
- )
-
- cc_delim_re = re.compile(r'\s*,\s*')
-
- for initial_cc, newheaders, expected_cc in tests:
- response = HttpResponse()
- if initial_cc is not None:
- response['Cache-Control'] = initial_cc
- patch_cache_control(response, **newheaders)
- parts = set(cc_delim_re.split(response['Cache-Control']))
- self.assertEqual(parts, expected_cc)
-
-CacheUtils = override_settings(
- CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
- CACHE_MIDDLEWARE_SECONDS=1,
- CACHES={
- 'default': {
- 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
- },
- },
- USE_I18N=False,
-)(CacheUtils)
-
-PrefixedCacheUtils = override_settings(
- CACHES={
- 'default': {
- 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
- 'KEY_PREFIX': 'cacheprefix',
- },
- },
-)(CacheUtils)
-
-
-class CacheHEADTest(TestCase):
-
- def setUp(self):
- self.path = '/cache/test/'
- self.cache = get_cache('default')
-
- def tearDown(self):
- self.cache.clear()
-
- def _get_request(self, method):
- request = HttpRequest()
- request.META = {
- 'SERVER_NAME': 'testserver',
- 'SERVER_PORT': 80,
- }
- request.method = method
- request.path = request.path_info = self.path
- return request
-
- def _get_request_cache(self, method):
- request = self._get_request(method)
- request._cache_update_cache = True
- return request
-
- def _set_cache(self, request, msg):
- response = HttpResponse()
- response.content = msg
- return UpdateCacheMiddleware().process_response(request, response)
-
- def test_head_caches_correctly(self):
- test_content = 'test content'
-
- request = self._get_request_cache('HEAD')
- self._set_cache(request, test_content)
-
- request = self._get_request('HEAD')
- get_cache_data = FetchFromCacheMiddleware().process_request(request)
- self.assertNotEqual(get_cache_data, None)
- self.assertEqual(test_content, get_cache_data.content)
-
- def test_head_with_cached_get(self):
- test_content = 'test content'
-
- request = self._get_request_cache('GET')
- self._set_cache(request, test_content)
-
- request = self._get_request('HEAD')
- get_cache_data = FetchFromCacheMiddleware().process_request(request)
- self.assertNotEqual(get_cache_data, None)
- self.assertEqual(test_content, get_cache_data.content)
-
-CacheHEADTest = override_settings(
- CACHE_MIDDLEWARE_SECONDS=60,
- CACHE_MIDDLEWARE_KEY_PREFIX='test',
- CACHES={
- 'default': {
- 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
- },
- },
-)(CacheHEADTest)
-
-
-class CacheI18nTest(TestCase):
-
- def setUp(self):
- self.path = '/cache/test/'
- self.cache = get_cache('default')
-
- def tearDown(self):
- self.cache.clear()
-
- def _get_request(self, method='GET'):
- request = HttpRequest()
- request.META = {
- 'SERVER_NAME': 'testserver',
- 'SERVER_PORT': 80,
- }
- request.method = method
- request.path = request.path_info = self.path
- return request
-
- def _get_request_cache(self, query_string=None):
- request = HttpRequest()
- request.META = {
- 'SERVER_NAME': 'testserver',
- 'SERVER_PORT': 80,
- }
- if query_string:
- request.META['QUERY_STRING'] = query_string
- request.GET = QueryDict(query_string)
- request.path = request.path_info = self.path
- request._cache_update_cache = True
- request.method = 'GET'
- request.session = {}
- return request
-
- @override_settings(USE_I18N=True, USE_L10N=False, USE_TZ=False)
- def test_cache_key_i18n_translation(self):
- request = self._get_request()
- lang = translation.get_language()
- response = HttpResponse()
- key = learn_cache_key(request, response)
- self.assertIn(lang, key, "Cache keys should include the language name when translation is active")
- key2 = get_cache_key(request)
- self.assertEqual(key, key2)
-
- @override_settings(USE_I18N=False, USE_L10N=True, USE_TZ=False)
- def test_cache_key_i18n_formatting(self):
- request = self._get_request()
- lang = translation.get_language()
- response = HttpResponse()
- key = learn_cache_key(request, response)
- self.assertIn(lang, key, "Cache keys should include the language name when formatting is active")
- key2 = get_cache_key(request)
- self.assertEqual(key, key2)
-
- @override_settings(USE_I18N=False, USE_L10N=False, USE_TZ=True)
- def test_cache_key_i18n_timezone(self):
- request = self._get_request()
- # This is tightly coupled to the implementation,
- # but it's the most straightforward way to test the key.
- tz = force_unicode(timezone.get_current_timezone_name(), errors='ignore')
- tz = tz.encode('ascii', 'ignore').replace(' ', '_')
- response = HttpResponse()
- key = learn_cache_key(request, response)
- self.assertIn(tz, key, "Cache keys should include the time zone name when time zones are active")
- key2 = get_cache_key(request)
- self.assertEqual(key, key2)
-
- @override_settings(USE_I18N=False, USE_L10N=False)
- def test_cache_key_no_i18n (self):
- request = self._get_request()
- lang = translation.get_language()
- tz = force_unicode(timezone.get_current_timezone_name(), errors='ignore')
- tz = tz.encode('ascii', 'ignore').replace(' ', '_')
- response = HttpResponse()
- key = learn_cache_key(request, response)
- self.assertNotIn(lang, key, "Cache keys shouldn't include the language name when i18n isn't active")
- self.assertNotIn(tz, key, "Cache keys shouldn't include the time zone name when i18n isn't active")
-
- @override_settings(USE_I18N=False, USE_L10N=False, USE_TZ=True)
- def test_cache_key_with_non_ascii_tzname(self):
- # Regression test for #17476
- class CustomTzName(timezone.UTC):
- name = ''
- def tzname(self, dt):
- return self.name
-
- request = self._get_request()
- response = HttpResponse()
- with timezone.override(CustomTzName()):
- CustomTzName.name = 'Hora estándar de Argentina' # UTF-8 string
- sanitized_name = 'Hora_estndar_de_Argentina'
- self.assertIn(sanitized_name, learn_cache_key(request, response),
- "Cache keys should include the time zone name when time zones are active")
-
- CustomTzName.name = u'Hora estándar de Argentina' # unicode
- sanitized_name = 'Hora_estndar_de_Argentina'
- self.assertIn(sanitized_name, learn_cache_key(request, response),
- "Cache keys should include the time zone name when time zones are active")
-
-
- @override_settings(
- CACHE_MIDDLEWARE_KEY_PREFIX="test",
- CACHE_MIDDLEWARE_SECONDS=60,
- USE_ETAGS=True,
- USE_I18N=True,
- )
- def test_middleware(self):
- def set_cache(request, lang, msg):
- translation.activate(lang)
- response = HttpResponse()
- response.content = msg
- return UpdateCacheMiddleware().process_response(request, response)
-
- # cache with non empty request.GET
- request = self._get_request_cache(query_string='foo=bar&other=true')
- get_cache_data = FetchFromCacheMiddleware().process_request(request)
- # first access, cache must return None
- self.assertEqual(get_cache_data, None)
- response = HttpResponse()
- content = 'Check for cache with QUERY_STRING'
- response.content = content
- UpdateCacheMiddleware().process_response(request, response)
- get_cache_data = FetchFromCacheMiddleware().process_request(request)
- # cache must return content
- self.assertNotEqual(get_cache_data, None)
- self.assertEqual(get_cache_data.content, content)
- # different QUERY_STRING, cache must be empty
- request = self._get_request_cache(query_string='foo=bar&somethingelse=true')
- get_cache_data = FetchFromCacheMiddleware().process_request(request)
- self.assertEqual(get_cache_data, None)
-
- # i18n tests
- en_message ="Hello world!"
- es_message ="Hola mundo!"
-
- request = self._get_request_cache()
- set_cache(request, 'en', en_message)
- get_cache_data = FetchFromCacheMiddleware().process_request(request)
- # Check that we can recover the cache
- self.assertNotEqual(get_cache_data, None)
- self.assertEqual(get_cache_data.content, en_message)
- # Check that we use etags
- self.assertTrue(get_cache_data.has_header('ETag'))
- # Check that we can disable etags
- with self.settings(USE_ETAGS=False):
- request._cache_update_cache = True
- set_cache(request, 'en', en_message)
- get_cache_data = FetchFromCacheMiddleware().process_request(request)
- self.assertFalse(get_cache_data.has_header('ETag'))
- # change the session language and set content
- request = self._get_request_cache()
- set_cache(request, 'es', es_message)
- # change again the language
- translation.activate('en')
- # retrieve the content from cache
- get_cache_data = FetchFromCacheMiddleware().process_request(request)
- self.assertEqual(get_cache_data.content, en_message)
- # change again the language
- translation.activate('es')
- get_cache_data = FetchFromCacheMiddleware().process_request(request)
- self.assertEqual(get_cache_data.content, es_message)
- # reset the language
- translation.deactivate()
-
-CacheI18nTest = override_settings(
- CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
- CACHES={
- 'default': {
- 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
- },
- },
- LANGUAGES=(
- ('en', 'English'),
- ('es', 'Spanish'),
- ),
-)(CacheI18nTest)
-
-PrefixedCacheI18nTest = override_settings(
- CACHES={
- 'default': {
- 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
- 'KEY_PREFIX': 'cacheprefix'
- },
- },
-)(CacheI18nTest)
-
-
-def hello_world_view(request, value):
- return HttpResponse('Hello World %s' % value)
-
-
-class CacheMiddlewareTest(TestCase):
-
- def setUp(self):
- self.factory = RequestFactory()
- self.default_cache = get_cache('default')
- self.other_cache = get_cache('other')
-
- def tearDown(self):
- self.default_cache.clear()
- self.other_cache.clear()
-
- def test_constructor(self):
- """
- Ensure the constructor is correctly distinguishing between usage of CacheMiddleware as
- Middleware vs. usage of CacheMiddleware as view decorator and setting attributes
- appropriately.
- """
- # If no arguments are passed in construction, it's being used as middleware.
- middleware = CacheMiddleware()
-
- # Now test object attributes against values defined in setUp above
- self.assertEqual(middleware.cache_timeout, 30)
- self.assertEqual(middleware.key_prefix, 'middlewareprefix')
- self.assertEqual(middleware.cache_alias, 'other')
- self.assertEqual(middleware.cache_anonymous_only, False)
-
- # If arguments are being passed in construction, it's being used as a decorator.
- # First, test with "defaults":
- as_view_decorator = CacheMiddleware(cache_alias=None, key_prefix=None)
-
- self.assertEqual(as_view_decorator.cache_timeout, 300) # Timeout value for 'default' cache, i.e. 300
- self.assertEqual(as_view_decorator.key_prefix, '')
- self.assertEqual(as_view_decorator.cache_alias, 'default') # Value of DEFAULT_CACHE_ALIAS from django.core.cache
- self.assertEqual(as_view_decorator.cache_anonymous_only, False)
-
- # Next, test with custom values:
- as_view_decorator_with_custom = CacheMiddleware(cache_anonymous_only=True, cache_timeout=60, cache_alias='other', key_prefix='foo')
-
- self.assertEqual(as_view_decorator_with_custom.cache_timeout, 60)
- self.assertEqual(as_view_decorator_with_custom.key_prefix, 'foo')
- self.assertEqual(as_view_decorator_with_custom.cache_alias, 'other')
- self.assertEqual(as_view_decorator_with_custom.cache_anonymous_only, True)
-
- def test_middleware(self):
- middleware = CacheMiddleware()
- prefix_middleware = CacheMiddleware(key_prefix='prefix1')
- timeout_middleware = CacheMiddleware(cache_timeout=1)
-
- request = self.factory.get('/view/')
-
- # Put the request through the request middleware
- result = middleware.process_request(request)
- self.assertEqual(result, None)
-
- response = hello_world_view(request, '1')
-
- # Now put the response through the response middleware
- response = middleware.process_response(request, response)
-
- # Repeating the request should result in a cache hit
- result = middleware.process_request(request)
- self.assertNotEquals(result, None)
- self.assertEqual(result.content, 'Hello World 1')
-
- # The same request through a different middleware won't hit
- result = prefix_middleware.process_request(request)
- self.assertEqual(result, None)
-
- # The same request with a timeout _will_ hit
- result = timeout_middleware.process_request(request)
- self.assertNotEquals(result, None)
- self.assertEqual(result.content, 'Hello World 1')
-
- @override_settings(CACHE_MIDDLEWARE_ANONYMOUS_ONLY=True)
- def test_cache_middleware_anonymous_only_wont_cause_session_access(self):
- """ The cache middleware shouldn't cause a session access due to
- CACHE_MIDDLEWARE_ANONYMOUS_ONLY if nothing else has accessed the
- session. Refs 13283 """
-
- from django.contrib.sessions.middleware import SessionMiddleware
- from django.contrib.auth.middleware import AuthenticationMiddleware
-
- middleware = CacheMiddleware()
- session_middleware = SessionMiddleware()
- auth_middleware = AuthenticationMiddleware()
-
- request = self.factory.get('/view_anon/')
-
- # Put the request through the request middleware
- session_middleware.process_request(request)
- auth_middleware.process_request(request)
- result = middleware.process_request(request)
- self.assertEqual(result, None)
-
- response = hello_world_view(request, '1')
-
- # Now put the response through the response middleware
- session_middleware.process_response(request, response)
- response = middleware.process_response(request, response)
-
- self.assertEqual(request.session.accessed, False)
-
- @override_settings(CACHE_MIDDLEWARE_ANONYMOUS_ONLY=True)
- def test_cache_middleware_anonymous_only_with_cache_page(self):
- """CACHE_MIDDLEWARE_ANONYMOUS_ONLY should still be effective when used
- with the cache_page decorator: the response to a request from an
- authenticated user should not be cached."""
-
- request = self.factory.get('/view_anon/')
-
- class MockAuthenticatedUser(object):
- def is_authenticated(self):
- return True
-
- class MockAccessedSession(object):
- accessed = True
-
- request.user = MockAuthenticatedUser()
- request.session = MockAccessedSession()
-
- response = cache_page(hello_world_view)(request, '1')
-
- self.assertFalse("Cache-Control" in response)
-
- def test_view_decorator(self):
- # decorate the same view with different cache decorators
- default_view = cache_page(hello_world_view)
- default_with_prefix_view = cache_page(key_prefix='prefix1')(hello_world_view)
-
- explicit_default_view = cache_page(cache='default')(hello_world_view)
- explicit_default_with_prefix_view = cache_page(cache='default', key_prefix='prefix1')(hello_world_view)
-
- other_view = cache_page(cache='other')(hello_world_view)
- other_with_prefix_view = cache_page(cache='other', key_prefix='prefix2')(hello_world_view)
- other_with_timeout_view = cache_page(3, cache='other', key_prefix='prefix3')(hello_world_view)
-
- request = self.factory.get('/view/')
-
- # Request the view once
- response = default_view(request, '1')
- self.assertEqual(response.content, 'Hello World 1')
-
- # Request again -- hit the cache
- response = default_view(request, '2')
- self.assertEqual(response.content, 'Hello World 1')
-
- # Requesting the same view with the explicit cache should yield the same result
- response = explicit_default_view(request, '3')
- self.assertEqual(response.content, 'Hello World 1')
-
- # Requesting with a prefix will hit a different cache key
- response = explicit_default_with_prefix_view(request, '4')
- self.assertEqual(response.content, 'Hello World 4')
-
- # Hitting the same view again gives a cache hit
- response = explicit_default_with_prefix_view(request, '5')
- self.assertEqual(response.content, 'Hello World 4')
-
- # And going back to the implicit cache will hit the same cache
- response = default_with_prefix_view(request, '6')
- self.assertEqual(response.content, 'Hello World 4')
-
- # Requesting from an alternate cache won't hit cache
- response = other_view(request, '7')
- self.assertEqual(response.content, 'Hello World 7')
-
- # But a repeated hit will hit cache
- response = other_view(request, '8')
- self.assertEqual(response.content, 'Hello World 7')
-
- # And prefixing the alternate cache yields yet another cache entry
- response = other_with_prefix_view(request, '9')
- self.assertEqual(response.content, 'Hello World 9')
-
- # Request from the alternate cache with a new prefix and a custom timeout
- response = other_with_timeout_view(request, '10')
- self.assertEqual(response.content, 'Hello World 10')
-
- # But if we wait a couple of seconds...
- time.sleep(2)
-
- # ... the default cache will still hit
- cache = get_cache('default')
- response = default_view(request, '11')
- self.assertEqual(response.content, 'Hello World 1')
-
- # ... the default cache with a prefix will still hit
- response = default_with_prefix_view(request, '12')
- self.assertEqual(response.content, 'Hello World 4')
-
- # ... the explicit default cache will still hit
- response = explicit_default_view(request, '13')
- self.assertEqual(response.content, 'Hello World 1')
-
- # ... the explicit default cache with a prefix will still hit
- response = explicit_default_with_prefix_view(request, '14')
- self.assertEqual(response.content, 'Hello World 4')
-
- # .. but a rapidly expiring cache won't hit
- response = other_view(request, '15')
- self.assertEqual(response.content, 'Hello World 15')
-
- # .. even if it has a prefix
- response = other_with_prefix_view(request, '16')
- self.assertEqual(response.content, 'Hello World 16')
-
- # ... but a view with a custom timeout will still hit
- response = other_with_timeout_view(request, '17')
- self.assertEqual(response.content, 'Hello World 10')
-
- # And if we wait a few more seconds
- time.sleep(2)
-
- # the custom timeouot cache will miss
- response = other_with_timeout_view(request, '18')
- self.assertEqual(response.content, 'Hello World 18')
-
-CacheMiddlewareTest = override_settings(
- CACHE_MIDDLEWARE_ALIAS='other',
- CACHE_MIDDLEWARE_KEY_PREFIX='middlewareprefix',
- CACHE_MIDDLEWARE_SECONDS=30,
- CACHE_MIDDLEWARE_ANONYMOUS_ONLY=False,
- CACHES={
- 'default': {
- 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
- },
- 'other': {
- 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
- 'LOCATION': 'other',
- 'TIMEOUT': '1',
- },
- },
-)(CacheMiddlewareTest)
-
-
-class TestWithTemplateResponse(TestCase):
- """
- Tests various headers w/ TemplateResponse.
-
- Most are probably redundant since they manipulate the same object
- anyway but the Etag header is 'special' because it relies on the
- content being complete (which is not necessarily always the case
- with a TemplateResponse)
- """
- def setUp(self):
- self.path = '/cache/test/'
- self.cache = get_cache('default')
-
- def tearDown(self):
- self.cache.clear()
-
- def _get_request(self, path, method='GET'):
- request = HttpRequest()
- request.META = {
- 'SERVER_NAME': 'testserver',
- 'SERVER_PORT': 80,
- }
- request.method = method
- request.path = request.path_info = "/cache/%s" % path
- return request
-
- def test_patch_vary_headers(self):
- headers = (
- # Initial vary, new headers, resulting vary.
- (None, ('Accept-Encoding',), 'Accept-Encoding'),
- ('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'),
- ('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'),
- ('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
- ('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
- ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
- (None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'),
- ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
- ('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
- )
- for initial_vary, newheaders, resulting_vary in headers:
- response = TemplateResponse(HttpResponse(), Template("This is a test"))
- if initial_vary is not None:
- response['Vary'] = initial_vary
- patch_vary_headers(response, newheaders)
- self.assertEqual(response['Vary'], resulting_vary)
-
- def test_get_cache_key(self):
- request = self._get_request(self.path)
- response = TemplateResponse(HttpResponse(), Template("This is a test"))
- key_prefix = 'localprefix'
- # Expect None if no headers have been set yet.
- self.assertEqual(get_cache_key(request), None)
- # Set headers to an empty list.
- learn_cache_key(request, response)
- self.assertEqual(get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.a8c87a3d8c44853d7f79474f7ffe4ad5.d41d8cd98f00b204e9800998ecf8427e')
- # Verify that a specified key_prefix is taken into account.
- learn_cache_key(request, response, key_prefix=key_prefix)
- self.assertEqual(get_cache_key(request, key_prefix=key_prefix), 'views.decorators.cache.cache_page.localprefix.GET.a8c87a3d8c44853d7f79474f7ffe4ad5.d41d8cd98f00b204e9800998ecf8427e')
-
- def test_get_cache_key_with_query(self):
- request = self._get_request(self.path + '?test=1')
- response = TemplateResponse(HttpResponse(), Template("This is a test"))
- # Expect None if no headers have been set yet.
- self.assertEqual(get_cache_key(request), None)
- # Set headers to an empty list.
- learn_cache_key(request, response)
- # Verify that the querystring is taken into account.
- self.assertEqual(get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.bd889c5a59603af44333ed21504db3cd.d41d8cd98f00b204e9800998ecf8427e')
-
- @override_settings(USE_ETAGS=False)
- def test_without_etag(self):
- response = TemplateResponse(HttpResponse(), Template("This is a test"))
- self.assertFalse(response.has_header('ETag'))
- patch_response_headers(response)
- self.assertFalse(response.has_header('ETag'))
- response = response.render()
- self.assertFalse(response.has_header('ETag'))
-
- @override_settings(USE_ETAGS=True)
- def test_with_etag(self):
- response = TemplateResponse(HttpResponse(), Template("This is a test"))
- self.assertFalse(response.has_header('ETag'))
- patch_response_headers(response)
- self.assertFalse(response.has_header('ETag'))
- response = response.render()
- self.assertTrue(response.has_header('ETag'))
-
-TestWithTemplateResponse = override_settings(
- CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
- CACHE_MIDDLEWARE_SECONDS=1,
- CACHES={
- 'default': {
- 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
- },
- },
- USE_I18N=False,
-)(TestWithTemplateResponse)
-
-
-class TestEtagWithAdmin(TestCase):
- # See https://code.djangoproject.com/ticket/16003
- urls = "regressiontests.admin_views.urls"
-
- def test_admin(self):
- with self.settings(USE_ETAGS=False):
- response = self.client.get('/test_admin/admin/')
- self.assertEqual(response.status_code, 200)
- self.assertFalse(response.has_header('ETag'))
-
- with self.settings(USE_ETAGS=True):
- response = self.client.get('/test_admin/admin/')
- self.assertEqual(response.status_code, 200)
- self.assertTrue(response.has_header('ETag'))
diff --git a/tests/django14/custom_columns/models.py b/tests/django14/custom_columns/models.py
deleted file mode 100644
index 17d3d79f..00000000
--- a/tests/django14/custom_columns/models.py
+++ /dev/null
@@ -1,41 +0,0 @@
-"""
-17. Custom column/table names
-
-If your database column name is different than your model attribute, use the
-``db_column`` parameter. Note that you'll use the field's name, not its column
-name, in API usage.
-
-If your database table name is different than your model name, use the
-``db_table`` Meta attribute. This has no effect on the API used to
-query the database.
-
-If you need to use a table name for a many-to-many relationship that differs
-from the default generated name, use the ``db_table`` parameter on the
-``ManyToManyField``. This has no effect on the API for querying the database.
-
-"""
-
-from django.db import models
-
-
-class Author(models.Model):
- first_name = models.CharField(max_length=30, db_column='firstname')
- last_name = models.CharField(max_length=30, db_column='last')
-
- def __unicode__(self):
- return u'%s %s' % (self.first_name, self.last_name)
-
- class Meta:
- db_table = 'my_author_table'
- ordering = ('last_name','first_name')
-
-class Article(models.Model):
- headline = models.CharField(max_length=100)
- authors = models.ManyToManyField(Author, db_table='my_m2m_table')
-
- def __unicode__(self):
- return self.headline
-
- class Meta:
- ordering = ('headline',)
-
diff --git a/tests/django14/custom_columns/tests.py b/tests/django14/custom_columns/tests.py
deleted file mode 100644
index c1bb6f0a..00000000
--- a/tests/django14/custom_columns/tests.py
+++ /dev/null
@@ -1,73 +0,0 @@
-from __future__ import absolute_import
-
-from django.core.exceptions import FieldError
-from django.test import TestCase
-
-from .models import Author, Article
-
-
-class CustomColumnsTests(TestCase):
- def test_db_column(self):
- a1 = Author.objects.create(first_name="John", last_name="Smith")
- a2 = Author.objects.create(first_name="Peter", last_name="Jones")
-
- art = Article.objects.create(headline="Django lets you build Web apps easily")
- art.authors = [a1, a2]
-
- # Although the table and column names on Author have been set to custom
- # values, nothing about using the Author model has changed...
-
- # Query the available authors
- self.assertQuerysetEqual(
- Author.objects.all(), [
- "Peter Jones", "John Smith",
- ],
- unicode
- )
- self.assertQuerysetEqual(
- Author.objects.filter(first_name__exact="John"), [
- "John Smith",
- ],
- unicode
- )
- self.assertEqual(
- Author.objects.get(first_name__exact="John"),
- a1,
- )
-
- self.assertRaises(FieldError,
- lambda: Author.objects.filter(firstname__exact="John")
- )
-
- a = Author.objects.get(last_name__exact="Smith")
- a.first_name = "John"
- a.last_name = "Smith"
-
- self.assertRaises(AttributeError, lambda: a.firstname)
- self.assertRaises(AttributeError, lambda: a.last)
-
- # Although the Article table uses a custom m2m table,
- # nothing about using the m2m relationship has changed...
-
- # Get all the authors for an article
- self.assertQuerysetEqual(
- art.authors.all(), [
- "Peter Jones",
- "John Smith",
- ],
- unicode
- )
- # Get the articles for an author
- self.assertQuerysetEqual(
- a.article_set.all(), [
- "Django lets you build Web apps easily",
- ],
- lambda a: a.headline
- )
- # Query the authors across the m2m relation
- self.assertQuerysetEqual(
- art.authors.filter(last_name='Jones'), [
- "Peter Jones"
- ],
- unicode
- )
diff --git a/tests/django14/custom_columns_regress/models.py b/tests/django14/custom_columns_regress/models.py
deleted file mode 100644
index fcb5a4be..00000000
--- a/tests/django14/custom_columns_regress/models.py
+++ /dev/null
@@ -1,37 +0,0 @@
-"""
-Regression for #9736.
-
-Checks some pathological column naming to make sure it doesn't break
-table creation or queries.
-
-"""
-
-from django.db import models
-
-
-class Article(models.Model):
- Article_ID = models.AutoField(primary_key=True, db_column='Article ID')
- headline = models.CharField(max_length=100)
- authors = models.ManyToManyField('Author', db_table='my m2m table')
- primary_author = models.ForeignKey('Author', db_column='Author ID', related_name='primary_set')
-
- def __unicode__(self):
- return self.headline
-
- class Meta:
- ordering = ('headline',)
-
-class Author(models.Model):
- Author_ID = models.AutoField(primary_key=True, db_column='Author ID')
- first_name = models.CharField(max_length=30, db_column='first name')
- last_name = models.CharField(max_length=30, db_column='last name')
-
- def __unicode__(self):
- return u'%s %s' % (self.first_name, self.last_name)
-
- class Meta:
- db_table = 'my author table'
- ordering = ('last_name','first_name')
-
-
-
diff --git a/tests/django14/custom_managers/models.py b/tests/django14/custom_managers/models.py
deleted file mode 100644
index 1052552b..00000000
--- a/tests/django14/custom_managers/models.py
+++ /dev/null
@@ -1,59 +0,0 @@
-"""
-23. Giving models a custom manager
-
-You can use a custom ``Manager`` in a particular model by extending the base
-``Manager`` class and instantiating your custom ``Manager`` in your model.
-
-There are two reasons you might want to customize a ``Manager``: to add extra
-``Manager`` methods, and/or to modify the initial ``QuerySet`` the ``Manager``
-returns.
-"""
-
-from django.db import models
-
-# An example of a custom manager called "objects".
-
-class PersonManager(models.Manager):
- def get_fun_people(self):
- return self.filter(fun=True)
-
-class Person(models.Model):
- first_name = models.CharField(max_length=30)
- last_name = models.CharField(max_length=30)
- fun = models.BooleanField()
- objects = PersonManager()
-
- def __unicode__(self):
- return u"%s %s" % (self.first_name, self.last_name)
-
-# An example of a custom manager that sets get_query_set().
-
-class PublishedBookManager(models.Manager):
- def get_query_set(self):
- return super(PublishedBookManager, self).get_query_set().filter(is_published=True)
-
-class Book(models.Model):
- title = models.CharField(max_length=50)
- author = models.CharField(max_length=30)
- is_published = models.BooleanField()
- published_objects = PublishedBookManager()
- authors = models.ManyToManyField(Person, related_name='books')
-
- def __unicode__(self):
- return self.title
-
-# An example of providing multiple custom managers.
-
-class FastCarManager(models.Manager):
- def get_query_set(self):
- return super(FastCarManager, self).get_query_set().filter(top_speed__gt=150)
-
-class Car(models.Model):
- name = models.CharField(max_length=10)
- mileage = models.IntegerField()
- top_speed = models.IntegerField(help_text="In miles per hour.")
- cars = models.Manager()
- fast_cars = FastCarManager()
-
- def __unicode__(self):
- return self.name
diff --git a/tests/django14/custom_managers/tests.py b/tests/django14/custom_managers/tests.py
deleted file mode 100644
index bdba3d07..00000000
--- a/tests/django14/custom_managers/tests.py
+++ /dev/null
@@ -1,73 +0,0 @@
-from __future__ import absolute_import
-
-from django.test import TestCase
-
-from .models import Person, Book, Car, PersonManager, PublishedBookManager
-
-
-class CustomManagerTests(TestCase):
- def test_manager(self):
- p1 = Person.objects.create(first_name="Bugs", last_name="Bunny", fun=True)
- p2 = Person.objects.create(first_name="Droopy", last_name="Dog", fun=False)
-
- self.assertQuerysetEqual(
- Person.objects.get_fun_people(), [
- "Bugs Bunny"
- ],
- unicode
- )
- # The RelatedManager used on the 'books' descriptor extends the default
- # manager
- self.assertTrue(isinstance(p2.books, PublishedBookManager))
-
- b1 = Book.published_objects.create(
- title="How to program", author="Rodney Dangerfield", is_published=True
- )
- b2 = Book.published_objects.create(
- title="How to be smart", author="Albert Einstein", is_published=False
- )
-
- # The default manager, "objects", doesn't exist, because a custom one
- # was provided.
- self.assertRaises(AttributeError, lambda: Book.objects)
-
- # The RelatedManager used on the 'authors' descriptor extends the
- # default manager
- self.assertTrue(isinstance(b2.authors, PersonManager))
-
- self.assertQuerysetEqual(
- Book.published_objects.all(), [
- "How to program",
- ],
- lambda b: b.title
- )
-
- c1 = Car.cars.create(name="Corvette", mileage=21, top_speed=180)
- c2 = Car.cars.create(name="Neon", mileage=31, top_speed=100)
-
- self.assertQuerysetEqual(
- Car.cars.order_by("name"), [
- "Corvette",
- "Neon",
- ],
- lambda c: c.name
- )
-
- self.assertQuerysetEqual(
- Car.fast_cars.all(), [
- "Corvette",
- ],
- lambda c: c.name
- )
-
- # Each model class gets a "_default_manager" attribute, which is a
- # reference to the first manager defined in the class. In this case,
- # it's "cars".
-
- self.assertQuerysetEqual(
- Car._default_manager.order_by("name"), [
- "Corvette",
- "Neon",
- ],
- lambda c: c.name
- )
diff --git a/tests/django14/custom_managers_regress/models.py b/tests/django14/custom_managers_regress/models.py
deleted file mode 100644
index 3c4e6217..00000000
--- a/tests/django14/custom_managers_regress/models.py
+++ /dev/null
@@ -1,41 +0,0 @@
-"""
-Regression tests for custom manager classes.
-"""
-
-from django.db import models
-
-
-class RestrictedManager(models.Manager):
- """
- A manager that filters out non-public instances.
- """
- def get_query_set(self):
- return super(RestrictedManager, self).get_query_set().filter(is_public=True)
-
-class RelatedModel(models.Model):
- name = models.CharField(max_length=50)
-
- def __unicode__(self):
- return self.name
-
-class RestrictedModel(models.Model):
- name = models.CharField(max_length=50)
- is_public = models.BooleanField(default=False)
- related = models.ForeignKey(RelatedModel)
-
- objects = RestrictedManager()
- plain_manager = models.Manager()
-
- def __unicode__(self):
- return self.name
-
-class OneToOneRestrictedModel(models.Model):
- name = models.CharField(max_length=50)
- is_public = models.BooleanField(default=False)
- related = models.OneToOneField(RelatedModel)
-
- objects = RestrictedManager()
- plain_manager = models.Manager()
-
- def __unicode__(self):
- return self.name
diff --git a/tests/django14/custom_methods/models.py b/tests/django14/custom_methods/models.py
deleted file mode 100644
index 4e3da588..00000000
--- a/tests/django14/custom_methods/models.py
+++ /dev/null
@@ -1,38 +0,0 @@
-"""
-3. Giving models custom methods
-
-Any method you add to a model will be available to instances.
-"""
-
-import datetime
-
-from django.db import models
-
-
-class Article(models.Model):
- headline = models.CharField(max_length=100)
- pub_date = models.DateField()
-
- def __unicode__(self):
- return self.headline
-
- def was_published_today(self):
- return self.pub_date == datetime.date.today()
-
- def articles_from_same_day_1(self):
- return Article.objects.filter(pub_date=self.pub_date).exclude(id=self.id)
-
- def articles_from_same_day_2(self):
- """
- Verbose version of get_articles_from_same_day_1, which does a custom
- database query for the sake of demonstration.
- """
- from django.db import connection
- cursor = connection.cursor()
- cursor.execute("""
- SELECT id, headline, pub_date
- FROM custom_methods_article
- WHERE pub_date = %s
- AND id != %s""", [connection.ops.value_to_db_date(self.pub_date),
- self.id])
- return [self.__class__(*row) for row in cursor.fetchall()]
diff --git a/tests/django14/custom_pk/fields.py b/tests/django14/custom_pk/fields.py
deleted file mode 100644
index 40551a36..00000000
--- a/tests/django14/custom_pk/fields.py
+++ /dev/null
@@ -1,55 +0,0 @@
-import random
-import string
-
-from django.db import models
-
-
-class MyWrapper(object):
- def __init__(self, value):
- self.value = value
-
- def __repr__(self):
- return "<%s: %s>" % (self.__class__.__name__, self.value)
-
- def __unicode__(self):
- return self.value
-
- def __eq__(self, other):
- if isinstance(other, self.__class__):
- return self.value == other.value
- return self.value == other
-
-class MyAutoField(models.CharField):
- __metaclass__ = models.SubfieldBase
-
- def __init__(self, *args, **kwargs):
- kwargs['max_length'] = 10
- super(MyAutoField, self).__init__(*args, **kwargs)
-
- def pre_save(self, instance, add):
- value = getattr(instance, self.attname, None)
- if not value:
- value = MyWrapper(''.join(random.sample(string.lowercase, 10)))
- setattr(instance, self.attname, value)
- return value
-
- def to_python(self, value):
- if not value:
- return
- if not isinstance(value, MyWrapper):
- value = MyWrapper(value)
- return value
-
- def get_db_prep_save(self, value, connection):
- if not value:
- return
- if isinstance(value, MyWrapper):
- return unicode(value)
- return value
-
- def get_db_prep_value(self, value, connection, prepared=False):
- if not value:
- return
- if isinstance(value, MyWrapper):
- return unicode(value)
- return value
diff --git a/tests/django14/custom_pk/models.py b/tests/django14/custom_pk/models.py
deleted file mode 100644
index e8647800..00000000
--- a/tests/django14/custom_pk/models.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-14. Using a custom primary key
-
-By default, Django adds an ``"id"`` field to each model. But you can override
-this behavior by explicitly adding ``primary_key=True`` to a field.
-"""
-
-from __future__ import absolute_import
-
-from django.db import models
-
-from .fields import MyAutoField
-
-
-class Employee(models.Model):
- employee_code = models.IntegerField(primary_key=True, db_column = 'code')
- first_name = models.CharField(max_length=20)
- last_name = models.CharField(max_length=20)
- class Meta:
- ordering = ('last_name', 'first_name')
-
- def __unicode__(self):
- return u"%s %s" % (self.first_name, self.last_name)
-
-class Business(models.Model):
- name = models.CharField(max_length=20, primary_key=True)
- employees = models.ManyToManyField(Employee)
- class Meta:
- verbose_name_plural = 'businesses'
-
- def __unicode__(self):
- return self.name
-
-class Bar(models.Model):
- id = MyAutoField(primary_key=True, db_index=True)
-
- def __unicode__(self):
- return repr(self.pk)
-
-
-class Foo(models.Model):
- bar = models.ForeignKey(Bar)
-
diff --git a/tests/django14/custom_pk/tests.py b/tests/django14/custom_pk/tests.py
deleted file mode 100644
index 1e3ca8e5..00000000
--- a/tests/django14/custom_pk/tests.py
+++ /dev/null
@@ -1,181 +0,0 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import
-
-from django.db import transaction, IntegrityError
-from django.test import TestCase, skipIfDBFeature
-
-from .models import Employee, Business, Bar, Foo
-
-
-class CustomPKTests(TestCase):
- def test_custom_pk(self):
- dan = Employee.objects.create(
- employee_code=123, first_name="Dan", last_name="Jones"
- )
- self.assertQuerysetEqual(
- Employee.objects.all(), [
- "Dan Jones",
- ],
- unicode
- )
-
- fran = Employee.objects.create(
- employee_code=456, first_name="Fran", last_name="Bones"
- )
- self.assertQuerysetEqual(
- Employee.objects.all(), [
- "Fran Bones",
- "Dan Jones",
- ],
- unicode
- )
-
- self.assertEqual(Employee.objects.get(pk=123), dan)
- self.assertEqual(Employee.objects.get(pk=456), fran)
-
- self.assertRaises(Employee.DoesNotExist,
- lambda: Employee.objects.get(pk=42)
- )
-
- # Use the name of the primary key, rather than pk.
- self.assertEqual(Employee.objects.get(employee_code=123), dan)
- # pk can be used as a substitute for the primary key.
- self.assertQuerysetEqual(
- Employee.objects.filter(pk__in=[123, 456]), [
- "Fran Bones",
- "Dan Jones",
- ],
- unicode
- )
- # The primary key can be accessed via the pk property on the model.
- e = Employee.objects.get(pk=123)
- self.assertEqual(e.pk, 123)
- # Or we can use the real attribute name for the primary key:
- self.assertEqual(e.employee_code, 123)
-
- # Fran got married and changed her last name.
- fran = Employee.objects.get(pk=456)
- fran.last_name = "Jones"
- fran.save()
-
- self.assertQuerysetEqual(
- Employee.objects.filter(last_name="Jones"), [
- "Dan Jones",
- "Fran Jones",
- ],
- unicode
- )
-
- emps = Employee.objects.in_bulk([123, 456])
- self.assertEqual(emps[123], dan)
-
- b = Business.objects.create(name="Sears")
- b.employees.add(dan, fran)
- self.assertQuerysetEqual(
- b.employees.all(), [
- "Dan Jones",
- "Fran Jones",
- ],
- unicode
- )
- self.assertQuerysetEqual(
- fran.business_set.all(), [
- "Sears",
- ],
- lambda b: b.name
- )
-
- self.assertEqual(Business.objects.in_bulk(["Sears"]), {
- "Sears": b,
- })
-
- self.assertQuerysetEqual(
- Business.objects.filter(name="Sears"), [
- "Sears"
- ],
- lambda b: b.name
- )
- self.assertQuerysetEqual(
- Business.objects.filter(pk="Sears"), [
- "Sears",
- ],
- lambda b: b.name
- )
-
- # Queries across tables, involving primary key
- self.assertQuerysetEqual(
- Employee.objects.filter(business__name="Sears"), [
- "Dan Jones",
- "Fran Jones",
- ],
- unicode,
- )
- self.assertQuerysetEqual(
- Employee.objects.filter(business__pk="Sears"), [
- "Dan Jones",
- "Fran Jones",
- ],
- unicode,
- )
-
- self.assertQuerysetEqual(
- Business.objects.filter(employees__employee_code=123), [
- "Sears",
- ],
- lambda b: b.name
- )
- self.assertQuerysetEqual(
- Business.objects.filter(employees__pk=123), [
- "Sears",
- ],
- lambda b: b.name,
- )
-
- self.assertQuerysetEqual(
- Business.objects.filter(employees__first_name__startswith="Fran"), [
- "Sears",
- ],
- lambda b: b.name
- )
-
- def test_unicode_pk(self):
- # Primary key may be unicode string
- bus = Business.objects.create(name=u'jaźń')
-
- def test_unique_pk(self):
- # The primary key must also obviously be unique, so trying to create a
- # new object with the same primary key will fail.
- e = Employee.objects.create(
- employee_code=123, first_name="Frank", last_name="Jones"
- )
- sid = transaction.savepoint()
- self.assertRaises(IntegrityError,
- Employee.objects.create, employee_code=123, first_name="Fred", last_name="Jones"
- )
- transaction.savepoint_rollback(sid)
-
- def test_custom_field_pk(self):
- # Regression for #10785 -- Custom fields can be used for primary keys.
- new_bar = Bar.objects.create()
- new_foo = Foo.objects.create(bar=new_bar)
-
- f = Foo.objects.get(bar=new_bar.pk)
- self.assertEqual(f, new_foo)
- self.assertEqual(f.bar, new_bar)
-
- f = Foo.objects.get(bar=new_bar)
- self.assertEqual(f, new_foo),
- self.assertEqual(f.bar, new_bar)
-
- # SQLite lets objects be saved with an empty primary key, even though an
- # integer is expected. So we can't check for an error being raised in that
- # case for SQLite. Remove it from the suite for this next bit.
- @skipIfDBFeature('supports_unspecified_pk')
- def test_required_pk(self):
- # The primary key must be specified, so an error is raised if you
- # try to create an object without it.
- sid = transaction.savepoint()
- self.assertRaises(IntegrityError,
- Employee.objects.create, first_name="Tom", last_name="Smith"
- )
- transaction.savepoint_rollback(sid)
diff --git a/tests/django14/datatypes/models.py b/tests/django14/datatypes/models.py
deleted file mode 100644
index 332ce843..00000000
--- a/tests/django14/datatypes/models.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""
-This is a basic model to test saving and loading boolean and date-related
-types, which in the past were problematic for some database backends.
-"""
-
-from django.db import models
-
-
-class Donut(models.Model):
- name = models.CharField(max_length=100)
- is_frosted = models.BooleanField(default=False)
- has_sprinkles = models.NullBooleanField()
- baked_date = models.DateField(null=True)
- baked_time = models.TimeField(null=True)
- consumed_at = models.DateTimeField(null=True)
- review = models.TextField()
-
- class Meta:
- ordering = ('consumed_at',)
-
- def __str__(self):
- return self.name
-
-class RumBaba(models.Model):
- baked_date = models.DateField(auto_now_add=True)
- baked_timestamp = models.DateTimeField(auto_now_add=True)
diff --git a/tests/django14/datatypes/tests.py b/tests/django14/datatypes/tests.py
deleted file mode 100644
index fb94e831..00000000
--- a/tests/django14/datatypes/tests.py
+++ /dev/null
@@ -1,95 +0,0 @@
-from __future__ import absolute_import
-
-import datetime
-
-from django.test import TestCase, skipIfDBFeature
-from django.utils.timezone import utc
-
-from .models import Donut, RumBaba
-
-
-class DataTypesTestCase(TestCase):
-
- def test_boolean_type(self):
- d = Donut(name='Apple Fritter')
- self.assertFalse(d.is_frosted)
- self.assertTrue(d.has_sprinkles is None)
- d.has_sprinkles = True
- self.assertTrue(d.has_sprinkles)
-
- d.save()
-
- d2 = Donut.objects.get(name='Apple Fritter')
- self.assertFalse(d2.is_frosted)
- self.assertTrue(d2.has_sprinkles)
-
- def test_date_type(self):
- d = Donut(name='Apple Fritter')
- d.baked_date = datetime.date(year=1938, month=6, day=4)
- d.baked_time = datetime.time(hour=5, minute=30)
- d.consumed_at = datetime.datetime(year=2007, month=4, day=20, hour=16, minute=19, second=59)
- d.save()
-
- d2 = Donut.objects.get(name='Apple Fritter')
- self.assertEqual(d2.baked_date, datetime.date(1938, 6, 4))
- self.assertEqual(d2.baked_time, datetime.time(5, 30))
- self.assertEqual(d2.consumed_at, datetime.datetime(2007, 4, 20, 16, 19, 59))
-
- def test_time_field(self):
- #Test for ticket #12059: TimeField wrongly handling datetime.datetime object.
- d = Donut(name='Apple Fritter')
- d.baked_time = datetime.datetime(year=2007, month=4, day=20, hour=16, minute=19, second=59)
- d.save()
-
- d2 = Donut.objects.get(name='Apple Fritter')
- self.assertEqual(d2.baked_time, datetime.time(16, 19, 59))
-
- def test_year_boundaries(self):
- """Year boundary tests (ticket #3689)"""
- d = Donut.objects.create(name='Date Test 2007',
- baked_date=datetime.datetime(year=2007, month=12, day=31),
- consumed_at=datetime.datetime(year=2007, month=12, day=31, hour=23, minute=59, second=59))
- d1 = Donut.objects.create(name='Date Test 2006',
- baked_date=datetime.datetime(year=2006, month=1, day=1),
- consumed_at=datetime.datetime(year=2006, month=1, day=1))
-
- self.assertEqual("Date Test 2007",
- Donut.objects.filter(baked_date__year=2007)[0].name)
-
- self.assertEqual("Date Test 2006",
- Donut.objects.filter(baked_date__year=2006)[0].name)
-
- d2 = Donut.objects.create(name='Apple Fritter',
- consumed_at = datetime.datetime(year=2007, month=4, day=20, hour=16, minute=19, second=59))
-
- self.assertEqual([u'Apple Fritter', u'Date Test 2007'],
- list(Donut.objects.filter(consumed_at__year=2007).order_by('name').values_list('name', flat=True)))
-
- self.assertEqual(0, Donut.objects.filter(consumed_at__year=2005).count())
- self.assertEqual(0, Donut.objects.filter(consumed_at__year=2008).count())
-
- def test_textfields_unicode(self):
- """Regression test for #10238: TextField values returned from the
- database should be unicode."""
- d = Donut.objects.create(name=u'Jelly Donut', review=u'Outstanding')
- newd = Donut.objects.get(id=d.id)
- self.assertTrue(isinstance(newd.review, unicode))
-
- @skipIfDBFeature('supports_timezones')
- def test_error_on_timezone(self):
- """Regression test for #8354: the MySQL and Oracle backends should raise
- an error if given a timezone-aware datetime object."""
- dt = datetime.datetime(2008, 8, 31, 16, 20, tzinfo=utc)
- d = Donut(name='Bear claw', consumed_at=dt)
- self.assertRaises(ValueError, d.save)
- # ValueError: MySQL backend does not support timezone-aware datetimes.
-
- def test_datefield_auto_now_add(self):
- """Regression test for #10970, auto_now_add for DateField should store
- a Python datetime.date, not a datetime.datetime"""
- b = RumBaba.objects.create()
- # Verify we didn't break DateTimeField behavior
- self.assertTrue(isinstance(b.baked_timestamp, datetime.datetime))
- # We need to test this this way because datetime.datetime inherits
- # from datetime.date:
- self.assertTrue(isinstance(b.baked_date, datetime.date) and not isinstance(b.baked_date, datetime.datetime))
diff --git a/tests/django14/dates/models.py b/tests/django14/dates/models.py
deleted file mode 100644
index e1fc1e74..00000000
--- a/tests/django14/dates/models.py
+++ /dev/null
@@ -1,23 +0,0 @@
-from django.db import models
-
-
-class Article(models.Model):
- title = models.CharField(max_length=100)
- pub_date = models.DateField()
-
- categories = models.ManyToManyField("Category", related_name="articles")
-
- def __unicode__(self):
- return self.title
-
-class Comment(models.Model):
- article = models.ForeignKey(Article, related_name="comments")
- text = models.TextField()
- pub_date = models.DateField()
- approval_date = models.DateField(null=True)
-
- def __unicode__(self):
- return 'Comment to %s (%s)' % (self.article.title, self.pub_date)
-
-class Category(models.Model):
- name = models.CharField(max_length=255)
diff --git a/tests/django14/dates/tests.py b/tests/django14/dates/tests.py
deleted file mode 100644
index de28cac4..00000000
--- a/tests/django14/dates/tests.py
+++ /dev/null
@@ -1,83 +0,0 @@
-from __future__ import absolute_import
-
-from datetime import datetime
-
-from django.test import TestCase
-
-from .models import Article, Comment, Category
-
-
-class DatesTests(TestCase):
- def test_related_model_traverse(self):
- a1 = Article.objects.create(
- title="First one",
- pub_date=datetime(2005, 7, 28),
- )
- a2 = Article.objects.create(
- title="Another one",
- pub_date=datetime(2010, 7, 28),
- )
- a3 = Article.objects.create(
- title="Third one, in the first day",
- pub_date=datetime(2005, 7, 28),
- )
-
- a1.comments.create(
- text="Im the HULK!",
- pub_date=datetime(2005, 7, 28),
- )
- a1.comments.create(
- text="HULK SMASH!",
- pub_date=datetime(2005, 7, 29),
- )
- a2.comments.create(
- text="LMAO",
- pub_date=datetime(2010, 7, 28),
- )
- a3.comments.create(
- text="+1",
- pub_date=datetime(2005, 8, 29),
- )
-
- c = Category.objects.create(name="serious-news")
- c.articles.add(a1, a3)
-
- self.assertQuerysetEqual(
- Comment.objects.dates("article__pub_date", "year"), [
- datetime(2005, 1, 1),
- datetime(2010, 1, 1),
- ],
- lambda d: d,
- )
- self.assertQuerysetEqual(
- Comment.objects.dates("article__pub_date", "month"), [
- datetime(2005, 7, 1),
- datetime(2010, 7, 1),
- ],
- lambda d: d
- )
- self.assertQuerysetEqual(
- Comment.objects.dates("article__pub_date", "day"), [
- datetime(2005, 7, 28),
- datetime(2010, 7, 28),
- ],
- lambda d: d
- )
- self.assertQuerysetEqual(
- Article.objects.dates("comments__pub_date", "day"), [
- datetime(2005, 7, 28),
- datetime(2005, 7, 29),
- datetime(2005, 8, 29),
- datetime(2010, 7, 28),
- ],
- lambda d: d
- )
- self.assertQuerysetEqual(
- Article.objects.dates("comments__approval_date", "day"), []
- )
- self.assertQuerysetEqual(
- Category.objects.dates("articles__pub_date", "day"), [
- datetime(2005, 7, 28),
- ],
- lambda d: d,
- )
diff --git a/tests/django14/db_typecasts/tests.py b/tests/django14/db_typecasts/tests.py
deleted file mode 100644
index 83bd1e68..00000000
--- a/tests/django14/db_typecasts/tests.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# Unit tests for typecast functions in django.db.backends.util
-
-import datetime
-
-from django.db.backends import util as typecasts
-from django.utils import unittest
-
-
-TEST_CASES = {
- 'typecast_date': (
- ('', None),
- (None, None),
- ('2005-08-11', datetime.date(2005, 8, 11)),
- ('1990-01-01', datetime.date(1990, 1, 1)),
- ),
- 'typecast_time': (
- ('', None),
- (None, None),
- ('0:00:00', datetime.time(0, 0)),
- ('0:30:00', datetime.time(0, 30)),
- ('8:50:00', datetime.time(8, 50)),
- ('08:50:00', datetime.time(8, 50)),
- ('12:00:00', datetime.time(12, 00)),
- ('12:30:00', datetime.time(12, 30)),
- ('13:00:00', datetime.time(13, 00)),
- ('23:59:00', datetime.time(23, 59)),
- ('00:00:12', datetime.time(0, 0, 12)),
- ('00:00:12.5', datetime.time(0, 0, 12, 500000)),
- ('7:22:13.312', datetime.time(7, 22, 13, 312000)),
- ),
- 'typecast_timestamp': (
- ('', None),
- (None, None),
- ('2005-08-11 0:00:00', datetime.datetime(2005, 8, 11)),
- ('2005-08-11 0:30:00', datetime.datetime(2005, 8, 11, 0, 30)),
- ('2005-08-11 8:50:30', datetime.datetime(2005, 8, 11, 8, 50, 30)),
- ('2005-08-11 8:50:30.123', datetime.datetime(2005, 8, 11, 8, 50, 30, 123000)),
- ('2005-08-11 8:50:30.9', datetime.datetime(2005, 8, 11, 8, 50, 30, 900000)),
- ('2005-08-11 8:50:30.312-05', datetime.datetime(2005, 8, 11, 8, 50, 30, 312000)),
- ('2005-08-11 8:50:30.312+02', datetime.datetime(2005, 8, 11, 8, 50, 30, 312000)),
- # ticket 14453
- ('2010-10-12 15:29:22.063202', datetime.datetime(2010, 10, 12, 15, 29, 22, 63202)),
- ('2010-10-12 15:29:22.063202-03', datetime.datetime(2010, 10, 12, 15, 29, 22, 63202)),
- ('2010-10-12 15:29:22.063202+04', datetime.datetime(2010, 10, 12, 15, 29, 22, 63202)),
- ('2010-10-12 15:29:22.0632021', datetime.datetime(2010, 10, 12, 15, 29, 22, 63202)),
- ('2010-10-12 15:29:22.0632029', datetime.datetime(2010, 10, 12, 15, 29, 22, 63202)),
- ),
-}
-
-class DBTypeCasts(unittest.TestCase):
- def test_typeCasts(self):
- for k, v in TEST_CASES.iteritems():
- for inpt, expected in v:
- got = getattr(typecasts, k)(inpt)
- self.assertEqual(got, expected, "In %s: %r doesn't match %r. Got %r instead." % (k, inpt, expected, got))
diff --git a/tests/django14/defer/models.py b/tests/django14/defer/models.py
deleted file mode 100644
index c64becf9..00000000
--- a/tests/django14/defer/models.py
+++ /dev/null
@@ -1,28 +0,0 @@
-"""
-Tests for defer() and only().
-"""
-
-from django.db import models
-
-
-class Secondary(models.Model):
- first = models.CharField(max_length=50)
- second = models.CharField(max_length=50)
-
-class Primary(models.Model):
- name = models.CharField(max_length=50)
- value = models.CharField(max_length=50)
- related = models.ForeignKey(Secondary)
-
- def __unicode__(self):
- return self.name
-
-class Child(Primary):
- pass
-
-class BigChild(Primary):
- other = models.CharField(max_length=50)
-
-class ChildProxy(Child):
- class Meta:
- proxy=True
diff --git a/tests/django14/defer/tests.py b/tests/django14/defer/tests.py
deleted file mode 100644
index 09138293..00000000
--- a/tests/django14/defer/tests.py
+++ /dev/null
@@ -1,160 +0,0 @@
-from __future__ import absolute_import
-
-from django.db.models.query_utils import DeferredAttribute
-from django.test import TestCase
-
-from .models import Secondary, Primary, Child, BigChild, ChildProxy
-
-
-class DeferTests(TestCase):
- def assert_delayed(self, obj, num):
- count = 0
- for field in obj._meta.fields:
- if isinstance(obj.__class__.__dict__.get(field.attname),
- DeferredAttribute):
- count += 1
- self.assertEqual(count, num)
-
- def test_defer(self):
- # To all outward appearances, instances with deferred fields look the
- # same as normal instances when we examine attribute values. Therefore
- # we test for the number of deferred fields on returned instances (by
- # poking at the internals), as a way to observe what is going on.
-
- s1 = Secondary.objects.create(first="x1", second="y1")
- p1 = Primary.objects.create(name="p1", value="xx", related=s1)
-
- qs = Primary.objects.all()
-
- self.assert_delayed(qs.defer("name")[0], 1)
- self.assert_delayed(qs.only("name")[0], 2)
- self.assert_delayed(qs.defer("related__first")[0], 0)
-
- # Using 'pk' with only() should result in 3 deferred fields, namely all
- # of them except the model's primary key see #15494
- self.assert_delayed(qs.only("pk")[0], 3)
-
- obj = qs.select_related().only("related__first")[0]
- self.assert_delayed(obj, 2)
-
- self.assertEqual(obj.related_id, s1.pk)
-
- # You can use 'pk' with reverse foreign key lookups.
- self.assert_delayed(s1.primary_set.all().only('pk')[0], 3)
-
- self.assert_delayed(qs.defer("name").extra(select={"a": 1})[0], 1)
- self.assert_delayed(qs.extra(select={"a": 1}).defer("name")[0], 1)
- self.assert_delayed(qs.defer("name").defer("value")[0], 2)
- self.assert_delayed(qs.only("name").only("value")[0], 2)
- self.assert_delayed(qs.only("name").defer("value")[0], 2)
- self.assert_delayed(qs.only("name", "value").defer("value")[0], 2)
- self.assert_delayed(qs.defer("name").only("value")[0], 2)
-
- obj = qs.only()[0]
- self.assert_delayed(qs.defer(None)[0], 0)
- self.assert_delayed(qs.only("name").defer(None)[0], 0)
-
- # User values() won't defer anything (you get the full list of
- # dictionaries back), but it still works.
- self.assertEqual(qs.defer("name").values()[0], {
- "id": p1.id,
- "name": "p1",
- "value": "xx",
- "related_id": s1.id,
- })
- self.assertEqual(qs.only("name").values()[0], {
- "id": p1.id,
- "name": "p1",
- "value": "xx",
- "related_id": s1.id,
- })
-
- # Using defer() and only() with get() is also valid.
- self.assert_delayed(qs.defer("name").get(pk=p1.pk), 1)
- self.assert_delayed(qs.only("name").get(pk=p1.pk), 2)
-
- # DOES THIS WORK?
- self.assert_delayed(qs.only("name").select_related("related")[0], 1)
- self.assert_delayed(qs.defer("related").select_related("related")[0], 0)
-
- # Saving models with deferred fields is possible (but inefficient,
- # since every field has to be retrieved first).
- obj = Primary.objects.defer("value").get(name="p1")
- obj.name = "a new name"
- obj.save()
- self.assertQuerysetEqual(
- Primary.objects.all(), [
- "a new name",
- ],
- lambda p: p.name
- )
-
- # Regression for #10572 - A subclass with no extra fields can defer
- # fields from the base class
- Child.objects.create(name="c1", value="foo", related=s1)
- # You can defer a field on a baseclass when the subclass has no fields
- obj = Child.objects.defer("value").get(name="c1")
- self.assert_delayed(obj, 1)
- self.assertEqual(obj.name, "c1")
- self.assertEqual(obj.value, "foo")
- obj.name = "c2"
- obj.save()
-
- # You can retrive a single column on a base class with no fields
- obj = Child.objects.only("name").get(name="c2")
- self.assert_delayed(obj, 3)
- self.assertEqual(obj.name, "c2")
- self.assertEqual(obj.value, "foo")
- obj.name = "cc"
- obj.save()
-
- BigChild.objects.create(name="b1", value="foo", related=s1, other="bar")
- # You can defer a field on a baseclass
- obj = BigChild.objects.defer("value").get(name="b1")
- self.assert_delayed(obj, 1)
- self.assertEqual(obj.name, "b1")
- self.assertEqual(obj.value, "foo")
- self.assertEqual(obj.other, "bar")
- obj.name = "b2"
- obj.save()
-
- # You can defer a field on a subclass
- obj = BigChild.objects.defer("other").get(name="b2")
- self.assert_delayed(obj, 1)
- self.assertEqual(obj.name, "b2")
- self.assertEqual(obj.value, "foo")
- self.assertEqual(obj.other, "bar")
- obj.name = "b3"
- obj.save()
-
- # You can retrieve a single field on a baseclass
- obj = BigChild.objects.only("name").get(name="b3")
- self.assert_delayed(obj, 4)
- self.assertEqual(obj.name, "b3")
- self.assertEqual(obj.value, "foo")
- self.assertEqual(obj.other, "bar")
- obj.name = "b4"
- obj.save()
-
- # You can retrieve a single field on a baseclass
- obj = BigChild.objects.only("other").get(name="b4")
- self.assert_delayed(obj, 4)
- self.assertEqual(obj.name, "b4")
- self.assertEqual(obj.value, "foo")
- self.assertEqual(obj.other, "bar")
- obj.name = "bb"
- obj.save()
-
- def test_defer_proxy(self):
- """
- Ensure select_related together with only on a proxy model behaves
- as expected. See #17876.
- """
- related = Secondary.objects.create(first='x1', second='x2')
- ChildProxy.objects.create(name='p1', value='xx', related=related)
- children = ChildProxy.objects.all().select_related().only('id', 'name')
- self.assertEqual(len(children), 1)
- child = children[0]
- self.assert_delayed(child, 1)
- self.assertEqual(child.name, 'p1')
- self.assertEqual(child.value, 'xx')
diff --git a/tests/django14/defer_regress/models.py b/tests/django14/defer_regress/models.py
deleted file mode 100644
index 812d2da2..00000000
--- a/tests/django14/defer_regress/models.py
+++ /dev/null
@@ -1,49 +0,0 @@
-"""
-Regression tests for defer() / only() behavior.
-"""
-
-from django.db import models
-
-
-class Item(models.Model):
- name = models.CharField(max_length=15)
- text = models.TextField(default="xyzzy")
- value = models.IntegerField()
- other_value = models.IntegerField(default=0)
-
- def __unicode__(self):
- return self.name
-
-class RelatedItem(models.Model):
- item = models.ForeignKey(Item)
-
-class Child(models.Model):
- name = models.CharField(max_length=10)
- value = models.IntegerField()
-
-class Leaf(models.Model):
- name = models.CharField(max_length=10)
- child = models.ForeignKey(Child)
- second_child = models.ForeignKey(Child, related_name="other", null=True)
- value = models.IntegerField(default=42)
-
- def __unicode__(self):
- return self.name
-
-class ResolveThis(models.Model):
- num = models.FloatField()
- name = models.CharField(max_length=16)
-
-class Proxy(Item):
- class Meta:
- proxy = True
-
-class SimpleItem(models.Model):
- name = models.CharField(max_length=15)
- value = models.IntegerField()
-
- def __unicode__(self):
- return self.name
-
-class Feature(models.Model):
- item = models.ForeignKey(SimpleItem)
diff --git a/tests/django14/defer_regress/tests.py b/tests/django14/defer_regress/tests.py
deleted file mode 100644
index 4afe39be..00000000
--- a/tests/django14/defer_regress/tests.py
+++ /dev/null
@@ -1,176 +0,0 @@
-from __future__ import with_statement, absolute_import
-
-from operator import attrgetter
-
-from django.contrib.contenttypes.models import ContentType
-from django.contrib.sessions.backends.db import SessionStore
-from django.db.models import Count
-from django.db.models.loading import cache
-from django.test import TestCase
-
-from .models import (ResolveThis, Item, RelatedItem, Child, Leaf, Proxy,
- SimpleItem, Feature)
-
-
-class DeferRegressionTest(TestCase):
- def test_basic(self):
- # Deferred fields should really be deferred and not accidentally use
- # the field's default value just because they aren't passed to __init__
-
- Item.objects.create(name="first", value=42)
- obj = Item.objects.only("name", "other_value").get(name="first")
- # Accessing "name" doesn't trigger a new database query. Accessing
- # "value" or "text" should.
- with self.assertNumQueries(0):
- self.assertEqual(obj.name, "first")
- self.assertEqual(obj.other_value, 0)
-
- with self.assertNumQueries(1):
- self.assertEqual(obj.value, 42)
-
- with self.assertNumQueries(1):
- self.assertEqual(obj.text, "xyzzy")
-
- with self.assertNumQueries(0):
- self.assertEqual(obj.text, "xyzzy")
-
- # Regression test for #10695. Make sure different instances don't
- # inadvertently share data in the deferred descriptor objects.
- i = Item.objects.create(name="no I'm first", value=37)
- items = Item.objects.only("value").order_by("-value")
- self.assertEqual(items[0].name, "first")
- self.assertEqual(items[1].name, "no I'm first")
-
- RelatedItem.objects.create(item=i)
- r = RelatedItem.objects.defer("item").get()
- self.assertEqual(r.item_id, i.id)
- self.assertEqual(r.item, i)
-
- # Some further checks for select_related() and inherited model
- # behavior (regression for #10710).
- c1 = Child.objects.create(name="c1", value=42)
- c2 = Child.objects.create(name="c2", value=37)
- Leaf.objects.create(name="l1", child=c1, second_child=c2)
-
- obj = Leaf.objects.only("name", "child").select_related()[0]
- self.assertEqual(obj.child.name, "c1")
-
- self.assertQuerysetEqual(
- Leaf.objects.select_related().only("child__name", "second_child__name"), [
- "l1",
- ],
- attrgetter("name")
- )
-
- # Models instances with deferred fields should still return the same
- # content types as their non-deferred versions (bug #10738).
- ctype = ContentType.objects.get_for_model
- c1 = ctype(Item.objects.all()[0])
- c2 = ctype(Item.objects.defer("name")[0])
- c3 = ctype(Item.objects.only("name")[0])
- self.assertTrue(c1 is c2 is c3)
-
- # Regression for #10733 - only() can be used on a model with two
- # foreign keys.
- results = Leaf.objects.only("name", "child", "second_child").select_related()
- self.assertEqual(results[0].child.name, "c1")
- self.assertEqual(results[0].second_child.name, "c2")
-
- results = Leaf.objects.only("name", "child", "second_child", "child__name", "second_child__name").select_related()
- self.assertEqual(results[0].child.name, "c1")
- self.assertEqual(results[0].second_child.name, "c2")
-
- # Test for #12163 - Pickling error saving session with unsaved model
- # instances.
- SESSION_KEY = '2b1189a188b44ad18c35e1baac6ceead'
-
- item = Item()
- item._deferred = False
- s = SessionStore(SESSION_KEY)
- s.clear()
- s["item"] = item
- s.save()
-
- s = SessionStore(SESSION_KEY)
- s.modified = True
- s.save()
-
- i2 = s["item"]
- self.assertFalse(i2._deferred)
-
- # Regression for #11936 - loading.get_models should not return deferred
- # models by default.
- klasses = sorted(
- cache.get_models(cache.get_app("defer_regress")),
- key=lambda klass: klass.__name__
- )
- self.assertEqual(
- klasses, [
- Child,
- Feature,
- Item,
- Leaf,
- Proxy,
- RelatedItem,
- ResolveThis,
- SimpleItem,
- ]
- )
-
- klasses = sorted(
- map(
- attrgetter("__name__"),
- cache.get_models(
- cache.get_app("defer_regress"), include_deferred=True
- ),
- )
- )
- self.assertEqual(
- klasses, [
- "Child",
- "Child_Deferred_value",
- "Feature",
- "Item",
- "Item_Deferred_name",
- "Item_Deferred_name_other_value_text",
- "Item_Deferred_name_other_value_value",
- "Item_Deferred_other_value_text_value",
- "Item_Deferred_text_value",
- "Leaf",
- "Leaf_Deferred_child_id_second_child_id_value",
- "Leaf_Deferred_name_value",
- "Leaf_Deferred_second_child_value",
- "Leaf_Deferred_value",
- "Proxy",
- "RelatedItem",
- "RelatedItem_Deferred_",
- "RelatedItem_Deferred_item_id",
- "ResolveThis",
- "SimpleItem",
- ]
- )
-
- # Regression for #16409 - make sure defer() and only() work with annotate()
- self.assertIsInstance(list(SimpleItem.objects.annotate(Count('feature')).defer('name')), list)
- self.assertIsInstance(list(SimpleItem.objects.annotate(Count('feature')).only('name')), list)
-
- def test_only_and_defer_usage_on_proxy_models(self):
- # Regression for #15790 - only() broken for proxy models
- proxy = Proxy.objects.create(name="proxy", value=42)
-
- msg = 'QuerySet.only() return bogus results with proxy models'
- dp = Proxy.objects.only('other_value').get(pk=proxy.pk)
- self.assertEqual(dp.name, proxy.name, msg=msg)
- self.assertEqual(dp.value, proxy.value, msg=msg)
-
- # also test things with .defer()
- msg = 'QuerySet.defer() return bogus results with proxy models'
- dp = Proxy.objects.defer('name', 'text', 'value').get(pk=proxy.pk)
- self.assertEqual(dp.name, proxy.name, msg=msg)
- self.assertEqual(dp.value, proxy.value, msg=msg)
-
- def test_resolve_columns(self):
- rt = ResolveThis.objects.create(num=5.0, name='Foobar')
- qs = ResolveThis.objects.defer('num')
- self.assertEqual(1, qs.count())
- self.assertEqual('Foobar', qs[0].name)
diff --git a/tests/django14/delete/models.py b/tests/django14/delete/models.py
deleted file mode 100644
index f8b78eb7..00000000
--- a/tests/django14/delete/models.py
+++ /dev/null
@@ -1,106 +0,0 @@
-from django.db import models
-
-
-class R(models.Model):
- is_default = models.BooleanField(default=False)
-
- def __str__(self):
- return "%s" % self.pk
-
-
-get_default_r = lambda: R.objects.get_or_create(is_default=True)[0]
-
-
-class S(models.Model):
- r = models.ForeignKey(R)
-
-
-class T(models.Model):
- s = models.ForeignKey(S)
-
-
-class U(models.Model):
- t = models.ForeignKey(T)
-
-
-class RChild(R):
- pass
-
-
-class A(models.Model):
- name = models.CharField(max_length=30)
-
- auto = models.ForeignKey(R, related_name="auto_set")
- auto_nullable = models.ForeignKey(R, null=True,
- related_name='auto_nullable_set')
- setvalue = models.ForeignKey(R, on_delete=models.SET(get_default_r),
- related_name='setvalue')
- setnull = models.ForeignKey(R, on_delete=models.SET_NULL, null=True,
- related_name='setnull_set')
- setdefault = models.ForeignKey(R, on_delete=models.SET_DEFAULT,
- default=get_default_r, related_name='setdefault_set')
- setdefault_none = models.ForeignKey(R, on_delete=models.SET_DEFAULT,
- default=None, null=True, related_name='setnull_nullable_set')
- cascade = models.ForeignKey(R, on_delete=models.CASCADE,
- related_name='cascade_set')
- cascade_nullable = models.ForeignKey(R, on_delete=models.CASCADE, null=True,
- related_name='cascade_nullable_set')
- protect = models.ForeignKey(R, on_delete=models.PROTECT, null=True)
- donothing = models.ForeignKey(R, on_delete=models.DO_NOTHING, null=True,
- related_name='donothing_set')
- child = models.ForeignKey(RChild, related_name="child")
- child_setnull = models.ForeignKey(RChild, on_delete=models.SET_NULL, null=True,
- related_name="child_setnull")
-
- # A OneToOneField is just a ForeignKey unique=True, so we don't duplicate
- # all the tests; just one smoke test to ensure on_delete works for it as
- # well.
- o2o_setnull = models.ForeignKey(R, null=True,
- on_delete=models.SET_NULL, related_name="o2o_nullable_set")
-
-
-def create_a(name):
- a = A(name=name)
- for name in ('auto', 'auto_nullable', 'setvalue', 'setnull', 'setdefault',
- 'setdefault_none', 'cascade', 'cascade_nullable', 'protect',
- 'donothing', 'o2o_setnull'):
- r = R.objects.create()
- setattr(a, name, r)
- a.child = RChild.objects.create()
- a.child_setnull = RChild.objects.create()
- a.save()
- return a
-
-
-class M(models.Model):
- m2m = models.ManyToManyField(R, related_name="m_set")
- m2m_through = models.ManyToManyField(R, through="MR",
- related_name="m_through_set")
- m2m_through_null = models.ManyToManyField(R, through="MRNull",
- related_name="m_through_null_set")
-
-
-class MR(models.Model):
- m = models.ForeignKey(M)
- r = models.ForeignKey(R)
-
-
-class MRNull(models.Model):
- m = models.ForeignKey(M)
- r = models.ForeignKey(R, null=True, on_delete=models.SET_NULL)
-
-
-class Avatar(models.Model):
- pass
-
-
-class User(models.Model):
- avatar = models.ForeignKey(Avatar, null=True)
-
-
-class HiddenUser(models.Model):
- r = models.ForeignKey(R, related_name="+")
-
-
-class HiddenUserProfile(models.Model):
- user = models.ForeignKey(HiddenUser)
diff --git a/tests/django14/delete/tests.py b/tests/django14/delete/tests.py
deleted file mode 100644
index d681a76f..00000000
--- a/tests/django14/delete/tests.py
+++ /dev/null
@@ -1,255 +0,0 @@
-from __future__ import absolute_import
-
-from django.db import models, IntegrityError
-from django.test import TestCase, skipUnlessDBFeature, skipIfDBFeature
-
-from .models import (R, RChild, S, T, U, A, M, MR, MRNull,
- create_a, get_default_r, User, Avatar, HiddenUser, HiddenUserProfile)
-
-
-class OnDeleteTests(TestCase):
- def setUp(self):
- self.DEFAULT = get_default_r()
-
- def test_auto(self):
- a = create_a('auto')
- a.auto.delete()
- self.assertFalse(A.objects.filter(name='auto').exists())
-
- def test_auto_nullable(self):
- a = create_a('auto_nullable')
- a.auto_nullable.delete()
- self.assertFalse(A.objects.filter(name='auto_nullable').exists())
-
- def test_setvalue(self):
- a = create_a('setvalue')
- a.setvalue.delete()
- a = A.objects.get(pk=a.pk)
- self.assertEqual(self.DEFAULT, a.setvalue)
-
- def test_setnull(self):
- a = create_a('setnull')
- a.setnull.delete()
- a = A.objects.get(pk=a.pk)
- self.assertEqual(None, a.setnull)
-
- def test_setdefault(self):
- a = create_a('setdefault')
- a.setdefault.delete()
- a = A.objects.get(pk=a.pk)
- self.assertEqual(self.DEFAULT, a.setdefault)
-
- def test_setdefault_none(self):
- a = create_a('setdefault_none')
- a.setdefault_none.delete()
- a = A.objects.get(pk=a.pk)
- self.assertEqual(None, a.setdefault_none)
-
- def test_cascade(self):
- a = create_a('cascade')
- a.cascade.delete()
- self.assertFalse(A.objects.filter(name='cascade').exists())
-
- def test_cascade_nullable(self):
- a = create_a('cascade_nullable')
- a.cascade_nullable.delete()
- self.assertFalse(A.objects.filter(name='cascade_nullable').exists())
-
- def test_protect(self):
- a = create_a('protect')
- self.assertRaises(IntegrityError, a.protect.delete)
-
- def test_do_nothing(self):
- # Testing DO_NOTHING is a bit harder: It would raise IntegrityError for a normal model,
- # so we connect to pre_delete and set the fk to a known value.
- replacement_r = R.objects.create()
- def check_do_nothing(sender, **kwargs):
- obj = kwargs['instance']
- obj.donothing_set.update(donothing=replacement_r)
- models.signals.pre_delete.connect(check_do_nothing)
- a = create_a('do_nothing')
- a.donothing.delete()
- a = A.objects.get(pk=a.pk)
- self.assertEqual(replacement_r, a.donothing)
- models.signals.pre_delete.disconnect(check_do_nothing)
-
- def test_inheritance_cascade_up(self):
- child = RChild.objects.create()
- child.delete()
- self.assertFalse(R.objects.filter(pk=child.pk).exists())
-
- def test_inheritance_cascade_down(self):
- child = RChild.objects.create()
- parent = child.r_ptr
- parent.delete()
- self.assertFalse(RChild.objects.filter(pk=child.pk).exists())
-
- def test_cascade_from_child(self):
- a = create_a('child')
- a.child.delete()
- self.assertFalse(A.objects.filter(name='child').exists())
- self.assertFalse(R.objects.filter(pk=a.child_id).exists())
-
- def test_cascade_from_parent(self):
- a = create_a('child')
- R.objects.get(pk=a.child_id).delete()
- self.assertFalse(A.objects.filter(name='child').exists())
- self.assertFalse(RChild.objects.filter(pk=a.child_id).exists())
-
- def test_setnull_from_child(self):
- a = create_a('child_setnull')
- a.child_setnull.delete()
- self.assertFalse(R.objects.filter(pk=a.child_setnull_id).exists())
-
- a = A.objects.get(pk=a.pk)
- self.assertEqual(None, a.child_setnull)
-
- def test_setnull_from_parent(self):
- a = create_a('child_setnull')
- R.objects.get(pk=a.child_setnull_id).delete()
- self.assertFalse(RChild.objects.filter(pk=a.child_setnull_id).exists())
-
- a = A.objects.get(pk=a.pk)
- self.assertEqual(None, a.child_setnull)
-
- def test_o2o_setnull(self):
- a = create_a('o2o_setnull')
- a.o2o_setnull.delete()
- a = A.objects.get(pk=a.pk)
- self.assertEqual(None, a.o2o_setnull)
-
-
-class DeletionTests(TestCase):
- def test_m2m(self):
- m = M.objects.create()
- r = R.objects.create()
- MR.objects.create(m=m, r=r)
- r.delete()
- self.assertFalse(MR.objects.exists())
-
- r = R.objects.create()
- MR.objects.create(m=m, r=r)
- m.delete()
- self.assertFalse(MR.objects.exists())
-
- m = M.objects.create()
- r = R.objects.create()
- m.m2m.add(r)
- r.delete()
- through = M._meta.get_field('m2m').rel.through
- self.assertFalse(through.objects.exists())
-
- r = R.objects.create()
- m.m2m.add(r)
- m.delete()
- self.assertFalse(through.objects.exists())
-
- m = M.objects.create()
- r = R.objects.create()
- MRNull.objects.create(m=m, r=r)
- r.delete()
- self.assertFalse(not MRNull.objects.exists())
- self.assertFalse(m.m2m_through_null.exists())
-
- def test_bulk(self):
- from django.db.models.sql.constants import GET_ITERATOR_CHUNK_SIZE
- s = S.objects.create(r=R.objects.create())
- for i in xrange(2*GET_ITERATOR_CHUNK_SIZE):
- T.objects.create(s=s)
- # 1 (select related `T` instances)
- # + 1 (select related `U` instances)
- # + 2 (delete `T` instances in batches)
- # + 1 (delete `s`)
- self.assertNumQueries(5, s.delete)
- self.assertFalse(S.objects.exists())
-
- def test_instance_update(self):
- deleted = []
- related_setnull_sets = []
- def pre_delete(sender, **kwargs):
- obj = kwargs['instance']
- deleted.append(obj)
- if isinstance(obj, R):
- related_setnull_sets.append(list(a.pk for a in obj.setnull_set.all()))
-
- models.signals.pre_delete.connect(pre_delete)
- a = create_a('update_setnull')
- a.setnull.delete()
-
- a = create_a('update_cascade')
- a.cascade.delete()
-
- for obj in deleted:
- self.assertEqual(None, obj.pk)
-
- for pk_list in related_setnull_sets:
- for a in A.objects.filter(id__in=pk_list):
- self.assertEqual(None, a.setnull)
-
- models.signals.pre_delete.disconnect(pre_delete)
-
- def test_deletion_order(self):
- pre_delete_order = []
- post_delete_order = []
-
- def log_post_delete(sender, **kwargs):
- pre_delete_order.append((sender, kwargs['instance'].pk))
-
- def log_pre_delete(sender, **kwargs):
- post_delete_order.append((sender, kwargs['instance'].pk))
-
- models.signals.post_delete.connect(log_post_delete)
- models.signals.pre_delete.connect(log_pre_delete)
-
- r = R.objects.create(pk=1)
- s1 = S.objects.create(pk=1, r=r)
- s2 = S.objects.create(pk=2, r=r)
- t1 = T.objects.create(pk=1, s=s1)
- t2 = T.objects.create(pk=2, s=s2)
- r.delete()
- self.assertEqual(
- pre_delete_order, [(T, 2), (T, 1), (S, 2), (S, 1), (R, 1)]
- )
- self.assertEqual(
- post_delete_order, [(T, 1), (T, 2), (S, 1), (S, 2), (R, 1)]
- )
-
- models.signals.post_delete.disconnect(log_post_delete)
- models.signals.post_delete.disconnect(log_pre_delete)
-
- @skipUnlessDBFeature("can_defer_constraint_checks")
- def test_can_defer_constraint_checks(self):
- u = User.objects.create(
- avatar=Avatar.objects.create()
- )
- a = Avatar.objects.get(pk=u.avatar_id)
- # 1 query to find the users for the avatar.
- # 1 query to delete the user
- # 1 query to delete the avatar
- # The important thing is that when we can defer constraint checks there
- # is no need to do an UPDATE on User.avatar to null it out.
- self.assertNumQueries(3, a.delete)
- self.assertFalse(User.objects.exists())
- self.assertFalse(Avatar.objects.exists())
-
- @skipIfDBFeature("can_defer_constraint_checks")
- def test_cannot_defer_constraint_checks(self):
- u = User.objects.create(
- avatar=Avatar.objects.create()
- )
- a = Avatar.objects.get(pk=u.avatar_id)
- # 1 query to find the users for the avatar.
- # 1 query to delete the user
- # 1 query to null out user.avatar, because we can't defer the constraint
- # 1 query to delete the avatar
- self.assertNumQueries(4, a.delete)
- self.assertFalse(User.objects.exists())
- self.assertFalse(Avatar.objects.exists())
-
- def test_hidden_related(self):
- r = R.objects.create()
- h = HiddenUser.objects.create(r=r)
- p = HiddenUserProfile.objects.create(user=h)
-
- r.delete()
- self.assertEqual(HiddenUserProfile.objects.count(), 0)
diff --git a/tests/django14/delete_regress/models.py b/tests/django14/delete_regress/models.py
deleted file mode 100644
index 5db253f7..00000000
--- a/tests/django14/delete_regress/models.py
+++ /dev/null
@@ -1,95 +0,0 @@
-from django.contrib.contenttypes import generic
-from django.contrib.contenttypes.models import ContentType
-from django.db import models
-
-
-class Award(models.Model):
- name = models.CharField(max_length=25)
- object_id = models.PositiveIntegerField()
- content_type = models.ForeignKey(ContentType)
- content_object = generic.GenericForeignKey()
-
-class AwardNote(models.Model):
- award = models.ForeignKey(Award)
- note = models.CharField(max_length=100)
-
-class Person(models.Model):
- name = models.CharField(max_length=25)
- awards = generic.GenericRelation(Award)
-
-class Book(models.Model):
- pagecount = models.IntegerField()
-
-class Toy(models.Model):
- name = models.CharField(max_length=50)
-
-class Child(models.Model):
- name = models.CharField(max_length=50)
- toys = models.ManyToManyField(Toy, through='PlayedWith')
-
-class PlayedWith(models.Model):
- child = models.ForeignKey(Child)
- toy = models.ForeignKey(Toy)
- date = models.DateField(db_column='date_col')
-
-class PlayedWithNote(models.Model):
- played = models.ForeignKey(PlayedWith)
- note = models.TextField()
-
-class Contact(models.Model):
- label = models.CharField(max_length=100)
-
-class Email(Contact):
- email_address = models.EmailField(max_length=100)
-
-class Researcher(models.Model):
- contacts = models.ManyToManyField(Contact, related_name="research_contacts")
-
-class Food(models.Model):
- name = models.CharField(max_length=20, unique=True)
-
-class Eaten(models.Model):
- food = models.ForeignKey(Food, to_field="name")
- meal = models.CharField(max_length=20)
-
-
-# Models for #15776
-
-class Policy(models.Model):
- policy_number = models.CharField(max_length=10)
-
-class Version(models.Model):
- policy = models.ForeignKey(Policy)
-
-class Location(models.Model):
- version = models.ForeignKey(Version, blank=True, null=True)
-
-class Item(models.Model):
- version = models.ForeignKey(Version)
- location = models.ForeignKey(Location, blank=True, null=True)
-
-# Models for #16128
-
-class File(models.Model):
- pass
-
-class Image(File):
- class Meta:
- proxy = True
-
-class Photo(Image):
- class Meta:
- proxy = True
-
-class FooImage(models.Model):
- my_image = models.ForeignKey(Image)
-
-class FooFile(models.Model):
- my_file = models.ForeignKey(File)
-
-class FooPhoto(models.Model):
- my_photo = models.ForeignKey(Photo)
-
-class FooFileProxy(FooFile):
- class Meta:
- proxy = True
diff --git a/tests/django14/delete_regress/tests.py b/tests/django14/delete_regress/tests.py
deleted file mode 100644
index 32feae2d..00000000
--- a/tests/django14/delete_regress/tests.py
+++ /dev/null
@@ -1,260 +0,0 @@
-from __future__ import absolute_import
-
-import datetime
-
-from django.conf import settings
-from django.db import backend, transaction, DEFAULT_DB_ALIAS
-from django.test import TestCase, TransactionTestCase, skipUnlessDBFeature
-
-from .models import (Book, Award, AwardNote, Person, Child, Toy, PlayedWith,
- PlayedWithNote, Email, Researcher, Food, Eaten, Policy, Version, Location,
- Item, Image, File, Photo, FooFile, FooImage, FooPhoto, FooFileProxy)
-
-
-# Can't run this test under SQLite, because you can't
-# get two connections to an in-memory database.
-class DeleteLockingTest(TransactionTestCase):
- def setUp(self):
- # Create a second connection to the default database
- conn_settings = settings.DATABASES[DEFAULT_DB_ALIAS]
- self.conn2 = backend.DatabaseWrapper({
- 'HOST': conn_settings['HOST'],
- 'NAME': conn_settings['NAME'],
- 'OPTIONS': conn_settings['OPTIONS'],
- 'PASSWORD': conn_settings['PASSWORD'],
- 'PORT': conn_settings['PORT'],
- 'USER': conn_settings['USER'],
- 'TIME_ZONE': settings.TIME_ZONE,
- })
-
- # Put both DB connections into managed transaction mode
- transaction.enter_transaction_management()
- transaction.managed(True)
- self.conn2._enter_transaction_management(True)
-
- def tearDown(self):
- # Close down the second connection.
- transaction.leave_transaction_management()
- self.conn2.close()
-
- @skipUnlessDBFeature('test_db_allows_multiple_connections')
- def test_concurrent_delete(self):
- "Deletes on concurrent transactions don't collide and lock the database. Regression for #9479"
-
- # Create some dummy data
- b1 = Book(id=1, pagecount=100)
- b2 = Book(id=2, pagecount=200)
- b3 = Book(id=3, pagecount=300)
- b1.save()
- b2.save()
- b3.save()
-
- transaction.commit()
-
- self.assertEqual(3, Book.objects.count())
-
- # Delete something using connection 2.
- cursor2 = self.conn2.cursor()
- cursor2.execute('DELETE from delete_regress_book WHERE id=1')
- self.conn2._commit()
-
- # Now perform a queryset delete that covers the object
- # deleted in connection 2. This causes an infinite loop
- # under MySQL InnoDB unless we keep track of already
- # deleted objects.
- Book.objects.filter(pagecount__lt=250).delete()
- transaction.commit()
- self.assertEqual(1, Book.objects.count())
- transaction.commit()
-
-
-class DeleteCascadeTests(TestCase):
- def test_generic_relation_cascade(self):
- """
- Django cascades deletes through generic-related objects to their
- reverse relations.
-
- """
- person = Person.objects.create(name='Nelson Mandela')
- award = Award.objects.create(name='Nobel', content_object=person)
- note = AwardNote.objects.create(note='a peace prize',
- award=award)
- self.assertEqual(AwardNote.objects.count(), 1)
- person.delete()
- self.assertEqual(Award.objects.count(), 0)
- # first two asserts are just sanity checks, this is the kicker:
- self.assertEqual(AwardNote.objects.count(), 0)
-
- def test_fk_to_m2m_through(self):
- """
- If an M2M relationship has an explicitly-specified through model, and
- some other model has an FK to that through model, deletion is cascaded
- from one of the participants in the M2M, to the through model, to its
- related model.
-
- """
- juan = Child.objects.create(name='Juan')
- paints = Toy.objects.create(name='Paints')
- played = PlayedWith.objects.create(child=juan, toy=paints,
- date=datetime.date.today())
- note = PlayedWithNote.objects.create(played=played,
- note='the next Jackson Pollock')
- self.assertEqual(PlayedWithNote.objects.count(), 1)
- paints.delete()
- self.assertEqual(PlayedWith.objects.count(), 0)
- # first two asserts just sanity checks, this is the kicker:
- self.assertEqual(PlayedWithNote.objects.count(), 0)
-
- def test_15776(self):
- policy = Policy.objects.create(pk=1, policy_number="1234")
- version = Version.objects.create(policy=policy)
- location = Location.objects.create(version=version)
- item = Item.objects.create(version=version, location=location)
- policy.delete()
-
-
-class DeleteCascadeTransactionTests(TransactionTestCase):
- def test_inheritance(self):
- """
- Auto-created many-to-many through tables referencing a parent model are
- correctly found by the delete cascade when a child of that parent is
- deleted.
-
- Refs #14896.
- """
- r = Researcher.objects.create()
- email = Email.objects.create(
- label="office-email", email_address="carl@science.edu"
- )
- r.contacts.add(email)
-
- email.delete()
-
- def test_to_field(self):
- """
- Cascade deletion works with ForeignKey.to_field set to non-PK.
-
- """
- apple = Food.objects.create(name="apple")
- eaten = Eaten.objects.create(food=apple, meal="lunch")
-
- apple.delete()
-
-class LargeDeleteTests(TestCase):
- def test_large_deletes(self):
- "Regression for #13309 -- if the number of objects > chunk size, deletion still occurs"
- for x in range(300):
- track = Book.objects.create(pagecount=x+100)
- Book.objects.all().delete()
- self.assertEqual(Book.objects.count(), 0)
-
-
-
-class ProxyDeleteTest(TestCase):
- """
- Tests on_delete behavior for proxy models.
-
- See #16128.
-
- """
- def create_image(self):
- """Return an Image referenced by both a FooImage and a FooFile."""
- # Create an Image
- test_image = Image()
- test_image.save()
- foo_image = FooImage(my_image=test_image)
- foo_image.save()
-
- # Get the Image instance as a File
- test_file = File.objects.get(pk=test_image.pk)
- foo_file = FooFile(my_file=test_file)
- foo_file.save()
-
- return test_image
-
-
- def test_delete_proxy(self):
- """
- Deleting the *proxy* instance bubbles through to its non-proxy and
- *all* referring objects are deleted.
-
- """
- self.create_image()
-
- Image.objects.all().delete()
-
- # An Image deletion == File deletion
- self.assertEqual(len(Image.objects.all()), 0)
- self.assertEqual(len(File.objects.all()), 0)
-
- # The Image deletion cascaded and *all* references to it are deleted.
- self.assertEqual(len(FooImage.objects.all()), 0)
- self.assertEqual(len(FooFile.objects.all()), 0)
-
-
- def test_delete_proxy_of_proxy(self):
- """
- Deleting a proxy-of-proxy instance should bubble through to its proxy
- and non-proxy parents, deleting *all* referring objects.
-
- """
- test_image = self.create_image()
-
- # Get the Image as a Photo
- test_photo = Photo.objects.get(pk=test_image.pk)
- foo_photo = FooPhoto(my_photo=test_photo)
- foo_photo.save()
-
- Photo.objects.all().delete()
-
- # A Photo deletion == Image deletion == File deletion
- self.assertEqual(len(Photo.objects.all()), 0)
- self.assertEqual(len(Image.objects.all()), 0)
- self.assertEqual(len(File.objects.all()), 0)
-
- # The Photo deletion should have cascaded and deleted *all*
- # references to it.
- self.assertEqual(len(FooPhoto.objects.all()), 0)
- self.assertEqual(len(FooFile.objects.all()), 0)
- self.assertEqual(len(FooImage.objects.all()), 0)
-
-
- def test_delete_concrete_parent(self):
- """
- Deleting an instance of a concrete model should also delete objects
- referencing its proxy subclass.
-
- """
- self.create_image()
-
- File.objects.all().delete()
-
- # A File deletion == Image deletion
- self.assertEqual(len(File.objects.all()), 0)
- self.assertEqual(len(Image.objects.all()), 0)
-
- # The File deletion should have cascaded and deleted *all* references
- # to it.
- self.assertEqual(len(FooFile.objects.all()), 0)
- self.assertEqual(len(FooImage.objects.all()), 0)
-
-
- def test_delete_proxy_pair(self):
- """
- If a pair of proxy models are linked by an FK from one concrete parent
- to the other, deleting one proxy model cascade-deletes the other, and
- the deletion happens in the right order (not triggering an
- IntegrityError on databases unable to defer integrity checks).
-
- Refs #17918.
-
- """
- # Create an Image (proxy of File) and FooFileProxy (proxy of FooFile,
- # which has an FK to File)
- image = Image.objects.create()
- as_file = File.objects.get(pk=image.pk)
- FooFileProxy.objects.create(my_file=as_file)
-
- Image.objects.all().delete()
-
- self.assertEqual(len(FooFileProxy.objects.all()), 0)
diff --git a/tests/django14/distinct_on_fields/models.py b/tests/django14/distinct_on_fields/models.py
deleted file mode 100644
index be0b5911..00000000
--- a/tests/django14/distinct_on_fields/models.py
+++ /dev/null
@@ -1,39 +0,0 @@
-from django.db import models
-
-class Tag(models.Model):
- name = models.CharField(max_length=10)
- parent = models.ForeignKey('self', blank=True, null=True,
- related_name='children')
-
- class Meta:
- ordering = ['name']
-
- def __unicode__(self):
- return self.name
-
-class Celebrity(models.Model):
- name = models.CharField("Name", max_length=20)
- greatest_fan = models.ForeignKey("Fan", null=True, unique=True)
-
- def __unicode__(self):
- return self.name
-
-class Fan(models.Model):
- fan_of = models.ForeignKey(Celebrity)
-
-class Staff(models.Model):
- id = models.IntegerField(primary_key=True)
- name = models.CharField(max_length=50)
- organisation = models.CharField(max_length=100)
- tags = models.ManyToManyField(Tag, through='StaffTag')
- coworkers = models.ManyToManyField('self')
-
- def __unicode__(self):
- return self.name
-
-class StaffTag(models.Model):
- staff = models.ForeignKey(Staff)
- tag = models.ForeignKey(Tag)
-
- def __unicode__(self):
- return u"%s -> %s" % (self.tag, self.staff)
diff --git a/tests/django14/distinct_on_fields/tests.py b/tests/django14/distinct_on_fields/tests.py
deleted file mode 100644
index 5021bc80..00000000
--- a/tests/django14/distinct_on_fields/tests.py
+++ /dev/null
@@ -1,116 +0,0 @@
-from __future__ import absolute_import, with_statement
-
-from django.db.models import Max
-from django.test import TestCase, skipUnlessDBFeature
-
-from .models import Tag, Celebrity, Fan, Staff, StaffTag
-
-class DistinctOnTests(TestCase):
- def setUp(self):
- t1 = Tag.objects.create(name='t1')
- t2 = Tag.objects.create(name='t2', parent=t1)
- t3 = Tag.objects.create(name='t3', parent=t1)
- t4 = Tag.objects.create(name='t4', parent=t3)
- t5 = Tag.objects.create(name='t5', parent=t3)
-
- p1_o1 = Staff.objects.create(id=1, name="p1", organisation="o1")
- p2_o1 = Staff.objects.create(id=2, name="p2", organisation="o1")
- p3_o1 = Staff.objects.create(id=3, name="p3", organisation="o1")
- p1_o2 = Staff.objects.create(id=4, name="p1", organisation="o2")
- p1_o1.coworkers.add(p2_o1, p3_o1)
- StaffTag.objects.create(staff=p1_o1, tag=t1)
- StaffTag.objects.create(staff=p1_o1, tag=t1)
-
- celeb1 = Celebrity.objects.create(name="c1")
- celeb2 = Celebrity.objects.create(name="c2")
-
- self.fan1 = Fan.objects.create(fan_of=celeb1)
- self.fan2 = Fan.objects.create(fan_of=celeb1)
- self.fan3 = Fan.objects.create(fan_of=celeb2)
-
- @skipUnlessDBFeature('can_distinct_on_fields')
- def test_basic_distinct_on(self):
- """QuerySet.distinct('field', ...) works"""
- # (qset, expected) tuples
- qsets = (
- (
- Staff.objects.distinct().order_by('name'),
- ['', '', '', ''],
- ),
- (
- Staff.objects.distinct('name').order_by('name'),
- ['', '', ''],
- ),
- (
- Staff.objects.distinct('organisation').order_by('organisation', 'name'),
- ['', ''],
- ),
- (
- Staff.objects.distinct('name', 'organisation').order_by('name', 'organisation'),
- ['', '', '', ''],
- ),
- (
- Celebrity.objects.filter(fan__in=[self.fan1, self.fan2, self.fan3]).\
- distinct('name').order_by('name'),
- ['', ''],
- ),
- # Does combining querysets work?
- (
- (Celebrity.objects.filter(fan__in=[self.fan1, self.fan2]).\
- distinct('name').order_by('name')
- |Celebrity.objects.filter(fan__in=[self.fan3]).\
- distinct('name').order_by('name')),
- ['', ''],
- ),
- (
- StaffTag.objects.distinct('staff','tag'),
- [' p1>'],
- ),
- (
- Tag.objects.order_by('parent__pk', 'pk').distinct('parent'),
- ['', '', ''],
- ),
- (
- StaffTag.objects.select_related('staff').distinct('staff__name').order_by('staff__name'),
- [' p1>'],
- ),
- # Fetch the alphabetically first coworker for each worker
- (
- (Staff.objects.distinct('id').order_by('id', 'coworkers__name').
- values_list('id', 'coworkers__name')),
- ["(1, u'p2')", "(2, u'p1')", "(3, u'p1')", "(4, None)"]
- ),
- )
- for qset, expected in qsets:
- self.assertQuerysetEqual(qset, expected)
- self.assertEqual(qset.count(), len(expected))
-
- # Combining queries with different distinct_fields is not allowed.
- base_qs = Celebrity.objects.all()
- self.assertRaisesMessage(
- AssertionError,
- "Cannot combine queries with different distinct fields.",
- lambda: (base_qs.distinct('id') & base_qs.distinct('name'))
- )
-
- # Test join unreffing
- c1 = Celebrity.objects.distinct('greatest_fan__id', 'greatest_fan__fan_of')
- self.assertIn('OUTER JOIN', str(c1.query))
- c2 = c1.distinct('pk')
- self.assertNotIn('OUTER JOIN', str(c2.query))
-
- @skipUnlessDBFeature('can_distinct_on_fields')
- def test_distinct_not_implemented_checks(self):
- # distinct + annotate not allowed
- with self.assertRaises(NotImplementedError):
- Celebrity.objects.annotate(Max('id')).distinct('id')[0]
- with self.assertRaises(NotImplementedError):
- Celebrity.objects.distinct('id').annotate(Max('id'))[0]
-
- # However this check is done only when the query executes, so you
- # can use distinct() to remove the fields before execution.
- Celebrity.objects.distinct('id').annotate(Max('id')).distinct()[0]
- # distinct + aggregate not allowed
- with self.assertRaises(NotImplementedError):
- Celebrity.objects.distinct('id').aggregate(Max('id'))
-
diff --git a/tests/django14/expressions/models.py b/tests/django14/expressions/models.py
deleted file mode 100644
index dd504999..00000000
--- a/tests/django14/expressions/models.py
+++ /dev/null
@@ -1,28 +0,0 @@
-"""
-Tests for F() query expression syntax.
-"""
-
-from django.db import models
-
-
-class Employee(models.Model):
- firstname = models.CharField(max_length=50)
- lastname = models.CharField(max_length=50)
-
- def __unicode__(self):
- return u'%s %s' % (self.firstname, self.lastname)
-
-class Company(models.Model):
- name = models.CharField(max_length=100)
- num_employees = models.PositiveIntegerField()
- num_chairs = models.PositiveIntegerField()
- ceo = models.ForeignKey(
- Employee,
- related_name='company_ceo_set')
- point_of_contact = models.ForeignKey(
- Employee,
- related_name='company_point_of_contact_set',
- null=True)
-
- def __unicode__(self):
- return self.name
diff --git a/tests/django14/expressions/tests.py b/tests/django14/expressions/tests.py
deleted file mode 100644
index 8f4f5461..00000000
--- a/tests/django14/expressions/tests.py
+++ /dev/null
@@ -1,220 +0,0 @@
-from __future__ import absolute_import
-
-from django.core.exceptions import FieldError
-from django.db.models import F
-from django.test import TestCase
-
-from .models import Company, Employee
-
-
-class ExpressionsTests(TestCase):
- def test_filter(self):
- Company.objects.create(
- name="Example Inc.", num_employees=2300, num_chairs=5,
- ceo=Employee.objects.create(firstname="Joe", lastname="Smith")
- )
- Company.objects.create(
- name="Foobar Ltd.", num_employees=3, num_chairs=4,
- ceo=Employee.objects.create(firstname="Frank", lastname="Meyer")
- )
- Company.objects.create(
- name="Test GmbH", num_employees=32, num_chairs=1,
- ceo=Employee.objects.create(firstname="Max", lastname="Mustermann")
- )
-
- company_query = Company.objects.values(
- "name", "num_employees", "num_chairs"
- ).order_by(
- "name", "num_employees", "num_chairs"
- )
-
- # We can filter for companies where the number of employees is greater
- # than the number of chairs.
- self.assertQuerysetEqual(
- company_query.filter(num_employees__gt=F("num_chairs")), [
- {
- "num_chairs": 5,
- "name": "Example Inc.",
- "num_employees": 2300,
- },
- {
- "num_chairs": 1,
- "name": "Test GmbH",
- "num_employees": 32
- },
- ],
- lambda o: o
- )
-
- # We can set one field to have the value of another field
- # Make sure we have enough chairs
- company_query.update(num_chairs=F("num_employees"))
- self.assertQuerysetEqual(
- company_query, [
- {
- "num_chairs": 2300,
- "name": "Example Inc.",
- "num_employees": 2300
- },
- {
- "num_chairs": 3,
- "name": "Foobar Ltd.",
- "num_employees": 3
- },
- {
- "num_chairs": 32,
- "name": "Test GmbH",
- "num_employees": 32
- }
- ],
- lambda o: o
- )
-
- # We can perform arithmetic operations in expressions
- # Make sure we have 2 spare chairs
- company_query.update(num_chairs=F("num_employees")+2)
- self.assertQuerysetEqual(
- company_query, [
- {
- 'num_chairs': 2302,
- 'name': u'Example Inc.',
- 'num_employees': 2300
- },
- {
- 'num_chairs': 5,
- 'name': u'Foobar Ltd.',
- 'num_employees': 3
- },
- {
- 'num_chairs': 34,
- 'name': u'Test GmbH',
- 'num_employees': 32
- }
- ],
- lambda o: o,
- )
-
- # Law of order of operations is followed
- company_query.update(
- num_chairs=F('num_employees') + 2 * F('num_employees')
- )
- self.assertQuerysetEqual(
- company_query, [
- {
- 'num_chairs': 6900,
- 'name': u'Example Inc.',
- 'num_employees': 2300
- },
- {
- 'num_chairs': 9,
- 'name': u'Foobar Ltd.',
- 'num_employees': 3
- },
- {
- 'num_chairs': 96,
- 'name': u'Test GmbH',
- 'num_employees': 32
- }
- ],
- lambda o: o,
- )
-
- # Law of order of operations can be overridden by parentheses
- company_query.update(
- num_chairs=((F('num_employees') + 2) * F('num_employees'))
- )
- self.assertQuerysetEqual(
- company_query, [
- {
- 'num_chairs': 5294600,
- 'name': u'Example Inc.',
- 'num_employees': 2300
- },
- {
- 'num_chairs': 15,
- 'name': u'Foobar Ltd.',
- 'num_employees': 3
- },
- {
- 'num_chairs': 1088,
- 'name': u'Test GmbH',
- 'num_employees': 32
- }
- ],
- lambda o: o,
- )
-
- # The relation of a foreign key can become copied over to an other
- # foreign key.
- self.assertEqual(
- Company.objects.update(point_of_contact=F('ceo')),
- 3
- )
- self.assertQuerysetEqual(
- Company.objects.all(), [
- "Joe Smith",
- "Frank Meyer",
- "Max Mustermann",
- ],
- lambda c: unicode(c.point_of_contact),
- )
-
- c = Company.objects.all()[0]
- c.point_of_contact = Employee.objects.create(firstname="Guido", lastname="van Rossum")
- c.save()
-
- # F Expressions can also span joins
- self.assertQuerysetEqual(
- Company.objects.filter(ceo__firstname=F("point_of_contact__firstname")), [
- "Foobar Ltd.",
- "Test GmbH",
- ],
- lambda c: c.name
- )
-
- Company.objects.exclude(
- ceo__firstname=F("point_of_contact__firstname")
- ).update(name="foo")
- self.assertEqual(
- Company.objects.exclude(
- ceo__firstname=F('point_of_contact__firstname')
- ).get().name,
- "foo",
- )
-
- self.assertRaises(FieldError,
- lambda: Company.objects.exclude(
- ceo__firstname=F('point_of_contact__firstname')
- ).update(name=F('point_of_contact__lastname'))
- )
-
- # F expressions can be used to update attributes on single objects
- test_gmbh = Company.objects.get(name="Test GmbH")
- self.assertEqual(test_gmbh.num_employees, 32)
- test_gmbh.num_employees = F("num_employees") + 4
- test_gmbh.save()
- test_gmbh = Company.objects.get(pk=test_gmbh.pk)
- self.assertEqual(test_gmbh.num_employees, 36)
-
- # F expressions cannot be used to update attributes which are foreign
- # keys, or attributes which involve joins.
- test_gmbh.point_of_contact = None
- test_gmbh.save()
- self.assertTrue(test_gmbh.point_of_contact is None)
- def test():
- test_gmbh.point_of_contact = F("ceo")
- self.assertRaises(ValueError, test)
-
- test_gmbh.point_of_contact = test_gmbh.ceo
- test_gmbh.save()
- test_gmbh.name = F("ceo__last_name")
- self.assertRaises(FieldError, test_gmbh.save)
-
- # F expressions cannot be used to update attributes on objects which do
- # not yet exist in the database
- acme = Company(
- name="The Acme Widget Co.", num_employees=12, num_chairs=5,
- ceo=test_gmbh.ceo
- )
- acme.num_employees = F("num_employees") + 16
- self.assertRaises(TypeError, acme.save)
diff --git a/tests/django14/expressions_regress/models.py b/tests/django14/expressions_regress/models.py
deleted file mode 100644
index 0ebccb50..00000000
--- a/tests/django14/expressions_regress/models.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""
-Model for testing arithmetic expressions.
-"""
-from django.db import models
-
-
-class Number(models.Model):
- integer = models.IntegerField(db_column='the_integer')
- float = models.FloatField(null=True, db_column='the_float')
-
- def __unicode__(self):
- return u'%i, %.3f' % (self.integer, self.float)
-
-class Experiment(models.Model):
- name = models.CharField(max_length=24)
- assigned = models.DateField()
- completed = models.DateField()
- start = models.DateTimeField()
- end = models.DateTimeField()
-
- class Meta:
- ordering = ('name',)
-
- def duration(self):
- return self.end - self.start
-
diff --git a/tests/django14/expressions_regress/tests.py b/tests/django14/expressions_regress/tests.py
deleted file mode 100644
index 80ddfadb..00000000
--- a/tests/django14/expressions_regress/tests.py
+++ /dev/null
@@ -1,399 +0,0 @@
-"""
-Spanning tests for all the operations that F() expressions can perform.
-"""
-from __future__ import absolute_import
-
-import datetime
-
-from django.db import connection
-from django.db.models import F
-from django.test import TestCase, Approximate, skipUnlessDBFeature
-
-from .models import Number, Experiment
-
-
-class ExpressionsRegressTests(TestCase):
-
- def setUp(self):
- Number(integer=-1).save()
- Number(integer=42).save()
- Number(integer=1337).save()
- self.assertEqual(Number.objects.update(float=F('integer')), 3)
-
- def test_fill_with_value_from_same_object(self):
- """
- We can fill a value in all objects with an other value of the
- same object.
- """
- self.assertQuerysetEqual(
- Number.objects.all(),
- [
- '',
- '',
- ''
- ]
- )
-
- def test_increment_value(self):
- """
- We can increment a value of all objects in a query set.
- """
- self.assertEqual(
- Number.objects.filter(integer__gt=0)
- .update(integer=F('integer') + 1),
- 2)
-
- self.assertQuerysetEqual(
- Number.objects.all(),
- [
- '',
- '',
- ''
- ]
- )
-
- def test_filter_not_equals_other_field(self):
- """
- We can filter for objects, where a value is not equals the value
- of an other field.
- """
- self.assertEqual(
- Number.objects.filter(integer__gt=0)
- .update(integer=F('integer') + 1),
- 2)
- self.assertQuerysetEqual(
- Number.objects.exclude(float=F('integer')),
- [
- '',
- ''
- ]
- )
-
- def test_complex_expressions(self):
- """
- Complex expressions of different connection types are possible.
- """
- n = Number.objects.create(integer=10, float=123.45)
- self.assertEqual(Number.objects.filter(pk=n.pk)
- .update(float=F('integer') + F('float') * 2),
- 1)
-
- self.assertEqual(Number.objects.get(pk=n.pk).integer, 10)
- self.assertEqual(Number.objects.get(pk=n.pk).float, Approximate(256.900, places=3))
-
-class ExpressionOperatorTests(TestCase):
- def setUp(self):
- self.n = Number.objects.create(integer=42, float=15.5)
-
- def test_lefthand_addition(self):
- # LH Addition of floats and integers
- Number.objects.filter(pk=self.n.pk).update(
- integer=F('integer') + 15,
- float=F('float') + 42.7
- )
-
- self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57)
- self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3))
-
- def test_lefthand_subtraction(self):
- # LH Subtraction of floats and integers
- Number.objects.filter(pk=self.n.pk).update(integer=F('integer') - 15,
- float=F('float') - 42.7)
-
- self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27)
- self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(-27.200, places=3))
-
- def test_lefthand_multiplication(self):
- # Multiplication of floats and integers
- Number.objects.filter(pk=self.n.pk).update(integer=F('integer') * 15,
- float=F('float') * 42.7)
-
- self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630)
- self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3))
-
- def test_lefthand_division(self):
- # LH Division of floats and integers
- Number.objects.filter(pk=self.n.pk).update(integer=F('integer') / 2,
- float=F('float') / 42.7)
-
- self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 21)
- self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(0.363, places=3))
-
- def test_lefthand_modulo(self):
- # LH Modulo arithmetic on integers
- Number.objects.filter(pk=self.n.pk).update(integer=F('integer') % 20)
-
- self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 2)
- self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))
-
- def test_lefthand_bitwise_and(self):
- # LH Bitwise ands on integers
- Number.objects.filter(pk=self.n.pk).update(integer=F('integer') & 56)
-
- self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 40)
- self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))
-
- @skipUnlessDBFeature('supports_bitwise_or')
- def test_lefthand_bitwise_or(self):
- # LH Bitwise or on integers
- Number.objects.filter(pk=self.n.pk).update(integer=F('integer') | 48)
-
- self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 58)
- self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))
-
- def test_right_hand_addition(self):
- # Right hand operators
- Number.objects.filter(pk=self.n.pk).update(integer=15 + F('integer'),
- float=42.7 + F('float'))
-
- # RH Addition of floats and integers
- self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57)
- self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3))
-
- def test_right_hand_subtraction(self):
- Number.objects.filter(pk=self.n.pk).update(integer=15 - F('integer'),
- float=42.7 - F('float'))
-
- # RH Subtraction of floats and integers
- self.assertEqual(Number.objects.get(pk=self.n.pk).integer, -27)
- self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(27.200, places=3))
-
- def test_right_hand_multiplication(self):
- # RH Multiplication of floats and integers
- Number.objects.filter(pk=self.n.pk).update(integer=15 * F('integer'),
- float=42.7 * F('float'))
-
- self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630)
- self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3))
-
- def test_right_hand_division(self):
- # RH Division of floats and integers
- Number.objects.filter(pk=self.n.pk).update(integer=640 / F('integer'),
- float=42.7 / F('float'))
-
- self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 15)
- self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(2.755, places=3))
-
- def test_right_hand_modulo(self):
- # RH Modulo arithmetic on integers
- Number.objects.filter(pk=self.n.pk).update(integer=69 % F('integer'))
-
- self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27)
- self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))
-
- def test_right_hand_bitwise_and(self):
- # RH Bitwise ands on integers
- Number.objects.filter(pk=self.n.pk).update(integer=15 & F('integer'))
-
- self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 10)
- self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))
-
- @skipUnlessDBFeature('supports_bitwise_or')
- def test_right_hand_bitwise_or(self):
- # RH Bitwise or on integers
- Number.objects.filter(pk=self.n.pk).update(integer=15 | F('integer'))
-
- self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 47)
- self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))
-
-class FTimeDeltaTests(TestCase):
-
- def setUp(self):
- sday = datetime.date(2010, 6, 25)
- stime = datetime.datetime(2010, 6, 25, 12, 15, 30, 747000)
- midnight = datetime.time(0)
-
- delta0 = datetime.timedelta(0)
- delta1 = datetime.timedelta(microseconds=253000)
- delta2 = datetime.timedelta(seconds=44)
- delta3 = datetime.timedelta(hours=21, minutes=8)
- delta4 = datetime.timedelta(days=10)
-
- # Test data is set so that deltas and delays will be
- # strictly increasing.
- self.deltas = []
- self.delays = []
- self.days_long = []
-
- # e0: started same day as assigned, zero duration
- end = stime+delta0
- e0 = Experiment.objects.create(name='e0', assigned=sday, start=stime,
- end=end, completed=end.date())
- self.deltas.append(delta0)
- self.delays.append(e0.start-
- datetime.datetime.combine(e0.assigned, midnight))
- self.days_long.append(e0.completed-e0.assigned)
-
- # e1: started one day after assigned, tiny duration, data
- # set so that end time has no fractional seconds, which
- # tests an edge case on sqlite. This Experiment is only
- # included in the test data when the DB supports microsecond
- # precision.
- if connection.features.supports_microsecond_precision:
- delay = datetime.timedelta(1)
- end = stime + delay + delta1
- e1 = Experiment.objects.create(name='e1', assigned=sday,
- start=stime+delay, end=end, completed=end.date())
- self.deltas.append(delta1)
- self.delays.append(e1.start-
- datetime.datetime.combine(e1.assigned, midnight))
- self.days_long.append(e1.completed-e1.assigned)
-
- # e2: started three days after assigned, small duration
- end = stime+delta2
- e2 = Experiment.objects.create(name='e2',
- assigned=sday-datetime.timedelta(3), start=stime, end=end,
- completed=end.date())
- self.deltas.append(delta2)
- self.delays.append(e2.start-
- datetime.datetime.combine(e2.assigned, midnight))
- self.days_long.append(e2.completed-e2.assigned)
-
- # e3: started four days after assigned, medium duration
- delay = datetime.timedelta(4)
- end = stime + delay + delta3
- e3 = Experiment.objects.create(name='e3',
- assigned=sday, start=stime+delay, end=end, completed=end.date())
- self.deltas.append(delta3)
- self.delays.append(e3.start-
- datetime.datetime.combine(e3.assigned, midnight))
- self.days_long.append(e3.completed-e3.assigned)
-
- # e4: started 10 days after assignment, long duration
- end = stime + delta4
- e4 = Experiment.objects.create(name='e4',
- assigned=sday-datetime.timedelta(10), start=stime, end=end,
- completed=end.date())
- self.deltas.append(delta4)
- self.delays.append(e4.start-
- datetime.datetime.combine(e4.assigned, midnight))
- self.days_long.append(e4.completed-e4.assigned)
- self.expnames = [e.name for e in Experiment.objects.all()]
-
- def test_delta_add(self):
- for i in range(len(self.deltas)):
- delta = self.deltas[i]
- test_set = [e.name for e in
- Experiment.objects.filter(end__lt=F('start')+delta)]
- self.assertEqual(test_set, self.expnames[:i])
-
- test_set = [e.name for e in
- Experiment.objects.filter(end__lte=F('start')+delta)]
- self.assertEqual(test_set, self.expnames[:i+1])
-
- def test_delta_subtract(self):
- for i in range(len(self.deltas)):
- delta = self.deltas[i]
- test_set = [e.name for e in
- Experiment.objects.filter(start__gt=F('end')-delta)]
- self.assertEqual(test_set, self.expnames[:i])
-
- test_set = [e.name for e in
- Experiment.objects.filter(start__gte=F('end')-delta)]
- self.assertEqual(test_set, self.expnames[:i+1])
-
- def test_exclude(self):
- for i in range(len(self.deltas)):
- delta = self.deltas[i]
- test_set = [e.name for e in
- Experiment.objects.exclude(end__lt=F('start')+delta)]
- self.assertEqual(test_set, self.expnames[i:])
-
- test_set = [e.name for e in
- Experiment.objects.exclude(end__lte=F('start')+delta)]
- self.assertEqual(test_set, self.expnames[i+1:])
-
- def test_date_comparison(self):
- for i in range(len(self.days_long)):
- days = self.days_long[i]
- test_set = [e.name for e in
- Experiment.objects.filter(completed__lt=F('assigned')+days)]
- self.assertEqual(test_set, self.expnames[:i])
-
- test_set = [e.name for e in
- Experiment.objects.filter(completed__lte=F('assigned')+days)]
- self.assertEqual(test_set, self.expnames[:i+1])
-
- @skipUnlessDBFeature("supports_mixed_date_datetime_comparisons")
- def test_mixed_comparisons1(self):
- for i in range(len(self.delays)):
- delay = self.delays[i]
- if not connection.features.supports_microsecond_precision:
- delay = datetime.timedelta(delay.days, delay.seconds)
- test_set = [e.name for e in
- Experiment.objects.filter(assigned__gt=F('start')-delay)]
- self.assertEqual(test_set, self.expnames[:i])
-
- test_set = [e.name for e in
- Experiment.objects.filter(assigned__gte=F('start')-delay)]
- self.assertEqual(test_set, self.expnames[:i+1])
-
- def test_mixed_comparisons2(self):
- delays = [datetime.timedelta(delay.days) for delay in self.delays]
- for i in range(len(delays)):
- delay = delays[i]
- test_set = [e.name for e in
- Experiment.objects.filter(start__lt=F('assigned')+delay)]
- self.assertEqual(test_set, self.expnames[:i])
-
- test_set = [e.name for e in
- Experiment.objects.filter(start__lte=F('assigned')+delay+
- datetime.timedelta(1))]
- self.assertEqual(test_set, self.expnames[:i+1])
-
- def test_delta_update(self):
- for i in range(len(self.deltas)):
- delta = self.deltas[i]
- exps = Experiment.objects.all()
- expected_durations = [e.duration() for e in exps]
- expected_starts = [e.start+delta for e in exps]
- expected_ends = [e.end+delta for e in exps]
-
- Experiment.objects.update(start=F('start')+delta, end=F('end')+delta)
- exps = Experiment.objects.all()
- new_starts = [e.start for e in exps]
- new_ends = [e.end for e in exps]
- new_durations = [e.duration() for e in exps]
- self.assertEqual(expected_starts, new_starts)
- self.assertEqual(expected_ends, new_ends)
- self.assertEqual(expected_durations, new_durations)
-
- def test_delta_invalid_op_mult(self):
- raised = False
- try:
- r = repr(Experiment.objects.filter(end__lt=F('start')*self.deltas[0]))
- except TypeError:
- raised = True
- self.assertTrue(raised, "TypeError not raised on attempt to multiply datetime by timedelta.")
-
- def test_delta_invalid_op_div(self):
- raised = False
- try:
- r = repr(Experiment.objects.filter(end__lt=F('start')/self.deltas[0]))
- except TypeError:
- raised = True
- self.assertTrue(raised, "TypeError not raised on attempt to divide datetime by timedelta.")
-
- def test_delta_invalid_op_mod(self):
- raised = False
- try:
- r = repr(Experiment.objects.filter(end__lt=F('start')%self.deltas[0]))
- except TypeError:
- raised = True
- self.assertTrue(raised, "TypeError not raised on attempt to modulo divide datetime by timedelta.")
-
- def test_delta_invalid_op_and(self):
- raised = False
- try:
- r = repr(Experiment.objects.filter(end__lt=F('start')&self.deltas[0]))
- except TypeError:
- raised = True
- self.assertTrue(raised, "TypeError not raised on attempt to binary and a datetime with a timedelta.")
-
- def test_delta_invalid_op_or(self):
- raised = False
- try:
- r = repr(Experiment.objects.filter(end__lt=F('start')|self.deltas[0]))
- except TypeError:
- raised = True
- self.assertTrue(raised, "TypeError not raised on attempt to binary or a datetime with a timedelta.")
diff --git a/tests/django14/force_insert_update/tests.py b/tests/django14/force_insert_update/tests.py
deleted file mode 100644
index a5b2dceb..00000000
--- a/tests/django14/force_insert_update/tests.py
+++ /dev/null
@@ -1,63 +0,0 @@
-from __future__ import absolute_import
-
-from django.db import transaction, IntegrityError, DatabaseError
-from django.test import TestCase
-
-from .models import (Counter, WithCustomPK, InheritedCounter, ProxyCounter,
- SubCounter)
-
-
-class ForceTests(TestCase):
- def test_force_update(self):
- c = Counter.objects.create(name="one", value=1)
-
- # The normal case
- c.value = 2
- c.save()
- # Same thing, via an update
- c.value = 3
- c.save(force_update=True)
-
- # Won't work because force_update and force_insert are mutually
- # exclusive
- c.value = 4
- self.assertRaises(ValueError, c.save, force_insert=True, force_update=True)
-
- # Try to update something that doesn't have a primary key in the first
- # place.
- c1 = Counter(name="two", value=2)
- self.assertRaises(ValueError, c1.save, force_update=True)
- c1.save(force_insert=True)
-
- # Won't work because we can't insert a pk of the same value.
- sid = transaction.savepoint()
- c.value = 5
- self.assertRaises(IntegrityError, c.save, force_insert=True)
- transaction.savepoint_rollback(sid)
-
- # Trying to update should still fail, even with manual primary keys, if
- # the data isn't in the database already.
- obj = WithCustomPK(name=1, value=1)
- self.assertRaises(DatabaseError, obj.save, force_update=True)
-
-
-class InheritanceTests(TestCase):
- def test_force_update_on_inherited_model(self):
- a = InheritedCounter(name="count", value=1, tag="spam")
- a.save()
- a.save(force_update=True)
-
- def test_force_update_on_proxy_model(self):
- a = ProxyCounter(name="count", value=1)
- a.save()
- a.save(force_update=True)
-
- def test_force_update_on_inherited_model_without_fields(self):
- '''
- Issue 13864: force_update fails on subclassed models, if they don't
- specify custom fields.
- '''
- a = SubCounter(name="count", value=1)
- a.save()
- a.value = 2
- a.save(force_update=True)
diff --git a/tests/django14/generic_relations/models.py b/tests/django14/generic_relations/models.py
deleted file mode 100644
index f3e216ed..00000000
--- a/tests/django14/generic_relations/models.py
+++ /dev/null
@@ -1,88 +0,0 @@
-"""
-34. Generic relations
-
-Generic relations let an object have a foreign key to any object through a
-content-type/object-id field. A ``GenericForeignKey`` field can point to any
-object, be it animal, vegetable, or mineral.
-
-The canonical example is tags (although this example implementation is *far*
-from complete).
-"""
-
-from django.contrib.contenttypes import generic
-from django.contrib.contenttypes.models import ContentType
-from django.db import models
-
-
-class TaggedItem(models.Model):
- """A tag on an item."""
- tag = models.SlugField()
- content_type = models.ForeignKey(ContentType)
- object_id = models.PositiveIntegerField()
-
- content_object = generic.GenericForeignKey()
-
- class Meta:
- ordering = ["tag", "content_type__name"]
-
- def __unicode__(self):
- return self.tag
-
-class ValuableTaggedItem(TaggedItem):
- value = models.PositiveIntegerField()
-
-class Comparison(models.Model):
- """
- A model that tests having multiple GenericForeignKeys
- """
- comparative = models.CharField(max_length=50)
-
- content_type1 = models.ForeignKey(ContentType, related_name="comparative1_set")
- object_id1 = models.PositiveIntegerField()
-
- content_type2 = models.ForeignKey(ContentType, related_name="comparative2_set")
- object_id2 = models.PositiveIntegerField()
-
- first_obj = generic.GenericForeignKey(ct_field="content_type1", fk_field="object_id1")
- other_obj = generic.GenericForeignKey(ct_field="content_type2", fk_field="object_id2")
-
- def __unicode__(self):
- return u"%s is %s than %s" % (self.first_obj, self.comparative, self.other_obj)
-
-class Animal(models.Model):
- common_name = models.CharField(max_length=150)
- latin_name = models.CharField(max_length=150)
-
- tags = generic.GenericRelation(TaggedItem)
- comparisons = generic.GenericRelation(Comparison,
- object_id_field="object_id1",
- content_type_field="content_type1")
-
- def __unicode__(self):
- return self.common_name
-
-class Vegetable(models.Model):
- name = models.CharField(max_length=150)
- is_yucky = models.BooleanField(default=True)
-
- tags = generic.GenericRelation(TaggedItem)
-
- def __unicode__(self):
- return self.name
-
-class Mineral(models.Model):
- name = models.CharField(max_length=150)
- hardness = models.PositiveSmallIntegerField()
-
- # note the lack of an explicit GenericRelation here...
-
- def __unicode__(self):
- return self.name
-
-class GeckoManager(models.Manager):
- def get_query_set(self):
- return super(GeckoManager, self).get_query_set().filter(has_tail=True)
-
-class Gecko(models.Model):
- has_tail = models.BooleanField()
- objects = GeckoManager()
diff --git a/tests/django14/generic_relations/tests.py b/tests/django14/generic_relations/tests.py
deleted file mode 100644
index 0ac552cf..00000000
--- a/tests/django14/generic_relations/tests.py
+++ /dev/null
@@ -1,251 +0,0 @@
-from __future__ import absolute_import
-
-from django import forms
-from django.contrib.contenttypes.generic import generic_inlineformset_factory
-from django.contrib.contenttypes.models import ContentType
-from django.test import TestCase
-
-from .models import (TaggedItem, ValuableTaggedItem, Comparison, Animal,
- Vegetable, Mineral, Gecko)
-
-
-class GenericRelationsTests(TestCase):
- def test_generic_relations(self):
- # Create the world in 7 lines of code...
- lion = Animal.objects.create(common_name="Lion", latin_name="Panthera leo")
- platypus = Animal.objects.create(
- common_name="Platypus", latin_name="Ornithorhynchus anatinus"
- )
- eggplant = Vegetable.objects.create(name="Eggplant", is_yucky=True)
- bacon = Vegetable.objects.create(name="Bacon", is_yucky=False)
- quartz = Mineral.objects.create(name="Quartz", hardness=7)
-
- # Objects with declared GenericRelations can be tagged directly -- the
- # API mimics the many-to-many API.
- bacon.tags.create(tag="fatty")
- bacon.tags.create(tag="salty")
- lion.tags.create(tag="yellow")
- lion.tags.create(tag="hairy")
- platypus.tags.create(tag="fatty")
- self.assertQuerysetEqual(lion.tags.all(), [
- "",
- ""
- ])
- self.assertQuerysetEqual(bacon.tags.all(), [
- "",
- ""
- ])
-
- # You can easily access the content object like a foreign key.
- t = TaggedItem.objects.get(tag="salty")
- self.assertEqual(t.content_object, bacon)
-
- # Recall that the Mineral class doesn't have an explicit GenericRelation
- # defined. That's OK, because you can create TaggedItems explicitly.
- tag1 = TaggedItem.objects.create(content_object=quartz, tag="shiny")
- tag2 = TaggedItem.objects.create(content_object=quartz, tag="clearish")
-
- # However, excluding GenericRelations means your lookups have to be a
- # bit more explicit.
- ctype = ContentType.objects.get_for_model(quartz)
- q = TaggedItem.objects.filter(
- content_type__pk=ctype.id, object_id=quartz.id
- )
- self.assertQuerysetEqual(q, [
- "",
- ""
- ])
-
- # You can set a generic foreign key in the way you'd expect.
- tag1.content_object = platypus
- tag1.save()
- self.assertQuerysetEqual(platypus.tags.all(), [
- "",
- ""
- ])
- q = TaggedItem.objects.filter(
- content_type__pk=ctype.id, object_id=quartz.id
- )
- self.assertQuerysetEqual(q, [""])
-
- # Queries across generic relations respect the content types. Even
- # though there are two TaggedItems with a tag of "fatty", this query
- # only pulls out the one with the content type related to Animals.
- self.assertQuerysetEqual(Animal.objects.order_by('common_name'), [
- "",
- ""
- ])
- self.assertQuerysetEqual(Animal.objects.filter(tags__tag='fatty'), [
- ""
- ])
- self.assertQuerysetEqual(Animal.objects.exclude(tags__tag='fatty'), [
- ""
- ])
-
- # If you delete an object with an explicit Generic relation, the related
- # objects are deleted when the source object is deleted.
- # Original list of tags:
- comp_func = lambda obj: (
- obj.tag, obj.content_type.model_class(), obj.object_id
- )
-
- self.assertQuerysetEqual(TaggedItem.objects.all(), [
- (u'clearish', Mineral, quartz.pk),
- (u'fatty', Animal, platypus.pk),
- (u'fatty', Vegetable, bacon.pk),
- (u'hairy', Animal, lion.pk),
- (u'salty', Vegetable, bacon.pk),
- (u'shiny', Animal, platypus.pk),
- (u'yellow', Animal, lion.pk)
- ],
- comp_func
- )
- lion.delete()
- self.assertQuerysetEqual(TaggedItem.objects.all(), [
- (u'clearish', Mineral, quartz.pk),
- (u'fatty', Animal, platypus.pk),
- (u'fatty', Vegetable, bacon.pk),
- (u'salty', Vegetable, bacon.pk),
- (u'shiny', Animal, platypus.pk)
- ],
- comp_func
- )
-
- # If Generic Relation is not explicitly defined, any related objects
- # remain after deletion of the source object.
- quartz_pk = quartz.pk
- quartz.delete()
- self.assertQuerysetEqual(TaggedItem.objects.all(), [
- (u'clearish', Mineral, quartz_pk),
- (u'fatty', Animal, platypus.pk),
- (u'fatty', Vegetable, bacon.pk),
- (u'salty', Vegetable, bacon.pk),
- (u'shiny', Animal, platypus.pk)
- ],
- comp_func
- )
- # If you delete a tag, the objects using the tag are unaffected
- # (other than losing a tag)
- tag = TaggedItem.objects.order_by("id")[0]
- tag.delete()
- self.assertQuerysetEqual(bacon.tags.all(), [""])
- self.assertQuerysetEqual(TaggedItem.objects.all(), [
- (u'clearish', Mineral, quartz_pk),
- (u'fatty', Animal, platypus.pk),
- (u'salty', Vegetable, bacon.pk),
- (u'shiny', Animal, platypus.pk)
- ],
- comp_func
- )
- TaggedItem.objects.filter(tag='fatty').delete()
- ctype = ContentType.objects.get_for_model(lion)
- self.assertQuerysetEqual(Animal.objects.filter(tags__content_type=ctype), [
- ""
- ])
-
-
- def test_multiple_gfk(self):
- # Simple tests for multiple GenericForeignKeys
- # only uses one model, since the above tests should be sufficient.
- tiger = Animal.objects.create(common_name="tiger")
- cheetah = Animal.objects.create(common_name="cheetah")
- bear = Animal.objects.create(common_name="bear")
-
- # Create directly
- Comparison.objects.create(
- first_obj=cheetah, other_obj=tiger, comparative="faster"
- )
- Comparison.objects.create(
- first_obj=tiger, other_obj=cheetah, comparative="cooler"
- )
-
- # Create using GenericRelation
- tiger.comparisons.create(other_obj=bear, comparative="cooler")
- tiger.comparisons.create(other_obj=cheetah, comparative="stronger")
- self.assertQuerysetEqual(cheetah.comparisons.all(), [
- ""
- ])
-
- # Filtering works
- self.assertQuerysetEqual(tiger.comparisons.filter(comparative="cooler"), [
- "",
- ""
- ])
-
- # Filtering and deleting works
- subjective = ["cooler"]
- tiger.comparisons.filter(comparative__in=subjective).delete()
- self.assertQuerysetEqual(Comparison.objects.all(), [
- "",
- ""
- ])
-
- # If we delete cheetah, Comparisons with cheetah as 'first_obj' will be
- # deleted since Animal has an explicit GenericRelation to Comparison
- # through first_obj. Comparisons with cheetah as 'other_obj' will not
- # be deleted.
- cheetah.delete()
- self.assertQuerysetEqual(Comparison.objects.all(), [
- ""
- ])
-
- def test_gfk_subclasses(self):
- # GenericForeignKey should work with subclasses (see #8309)
- quartz = Mineral.objects.create(name="Quartz", hardness=7)
- valuedtag = ValuableTaggedItem.objects.create(
- content_object=quartz, tag="shiny", value=10
- )
- self.assertEqual(valuedtag.content_object, quartz)
-
- def test_generic_inline_formsets(self):
- GenericFormSet = generic_inlineformset_factory(TaggedItem, extra=1)
- formset = GenericFormSet()
- self.assertHTMLEqual(u''.join(form.as_p() for form in formset.forms), u"""
-
""")
-
- formset = GenericFormSet(instance=Animal())
- self.assertHTMLEqual(u''.join(form.as_p() for form in formset.forms), u"""