{% endblock %}
\ No newline at end of file
diff --git a/application/urls.py b/application/urls.py
index 5f082be..3f455da 100644
--- a/application/urls.py
+++ b/application/urls.py
@@ -10,4 +10,6 @@ urlpatterns = [
url(r'^drafts/$', views.post_draft_list, name='post_draft_list'),
url(r'^post/(?P<pk>\d+)/publish/$', views.post_publish, name='post_publish'),
url(r'^post/(?P<pk>\d+)/remove/$', views.post_remove, name='post_remove'),
-]
\ No newline at end of file
+]
+
+""" url(r'^search/', views.blog_search_list_view, name='blog_search_list_view'), """
diff --git a/application/views.py b/application/views.py
index b7a6b16..67eb97c 100644
--- a/application/views.py
+++ b/application/views.py
@@ -6,8 +6,11 @@ from django.shortcuts import redirect
from django.contrib.auth.decorators import login_required
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth import authenticate, login, logout
+from django.db.models import Q
+
import logging
import mysite.settings
+import operator
# Create your views here.
@@ -36,17 +39,20 @@ def navlogin(request):
context = {'error': error}
return render(request, 'index.html', context)
+
@login_required
def post_list(request):
posts = Post.objects.filter(
published_date__lte=timezone.now()).order_by('published_date')
return render(request, 'post_list.html', {'posts': posts})
+
@login_required
def post_detail(request, pk):
post = get_object_or_404(Post, pk=pk)
return render(request, 'post_detail.html', {'post': post})
+
@login_required
@staff_member_required
def post_new(request):
@@ -61,6 +67,7 @@ def post_new(request):
form = PostForm()
return render(request, 'post_edit.html', {'form': form})
+
@login_required
@staff_member_required
def post_edit(request, pk):
@@ -76,6 +83,7 @@ def post_edit(request, pk):
form = PostForm(instance=post)
return render(request, 'post_edit.html', {'form': form})
+
@login_required
@staff_member_required
def post_draft_list(request):
@@ -83,6 +91,7 @@ def post_draft_list(request):
published_date__isnull=True).order_by('created_date')
return render(request, 'post_draft_list.html', {'posts': posts})
+
@login_required
@staff_member_required
def post_publish(request, pk):
@@ -90,6 +99,7 @@ def post_publish(request, pk):
post.publish()
return redirect('post_detail', pk=pk)
+
@login_required
@staff_member_required
def post_remove(request, pk):
@@ -97,6 +107,21 @@ def post_remove(request, pk):
post.delete()
return redirect('post_list')
+
@login_required
def student_page(request):
return render(request, 'student_page.html', {})
+
+""" Search for querys
+def blog_search_list_view(post_list, self):
+ result = super(post_list, self).get_queryset()
+ query = self.request.GET.get('q')
+ if query:
+ query_list = query.split()
+ result = result.filter(
+ reduce(operator.and_,
+ (Q(title__icontains=q) for q in query_list)) |
+ reduce(operator.and_,
+ (Q(content__icontains=q) for q in query_list))
+ )
+ return result """
\ No newline at end of file
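Note: the commented-out search draft above looks adapted from a class-based get_queryset() override, so its signature and super() call would not run as a plain module-level function, and reduce() must be imported from functools on Python 3 (importing operator alone is not enough). Below is a minimal function-based sketch of the same query logic (all terms in the title OR all terms in the content), assuming Post exposes title and content fields as in the draft and reusing the existing post_list.html template (an assumption). The login_required decorator mirrors the other views in this module.

    from functools import reduce  # reduce is not a builtin on Python 3
    import operator

    from django.contrib.auth.decorators import login_required
    from django.db.models import Q
    from django.shortcuts import render

    from .models import Post  # assumes Post lives in this app's models module


    @login_required
    def blog_search_list_view(request):
        # Show every post whose title OR content contains all search terms.
        posts = Post.objects.all()
        query = request.GET.get('q', '')
        query_list = query.split()
        if query_list:
            posts = posts.filter(
                reduce(operator.and_, (Q(title__icontains=q) for q in query_list)) |
                reduce(operator.and_, (Q(content__icontains=q) for q in query_list))
            )
        return render(request, 'post_list.html', {'posts': posts})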
diff --git a/thesisenv/lib/python3.6/site-packages/django/contrib/admin/migrations/0001_initial.py b/thesisenv/lib/python3.6/site-packages/django/contrib/admin/migrations/0001_initial.py
deleted file mode 100644
index f1e2804..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/contrib/admin/migrations/0001_initial.py
+++ /dev/null
@@ -1,47 +0,0 @@
-import django.contrib.admin.models
-from django.conf import settings
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- migrations.swappable_dependency(settings.AUTH_USER_MODEL),
- ('contenttypes', '__first__'),
- ]
-
- operations = [
- migrations.CreateModel(
- name='LogEntry',
- fields=[
- ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('action_time', models.DateTimeField(auto_now=True, verbose_name='action time')),
- ('object_id', models.TextField(null=True, verbose_name='object id', blank=True)),
- ('object_repr', models.CharField(max_length=200, verbose_name='object repr')),
- ('action_flag', models.PositiveSmallIntegerField(verbose_name='action flag')),
- ('change_message', models.TextField(verbose_name='change message', blank=True)),
- ('content_type', models.ForeignKey(
- to_field='id',
- on_delete=models.SET_NULL,
- blank=True, null=True,
- to='contenttypes.ContentType',
- verbose_name='content type',
- )),
- ('user', models.ForeignKey(
- to=settings.AUTH_USER_MODEL,
- on_delete=models.CASCADE,
- verbose_name='user',
- )),
- ],
- options={
- 'ordering': ('-action_time',),
- 'db_table': 'django_admin_log',
- 'verbose_name': 'log entry',
- 'verbose_name_plural': 'log entries',
- },
- bases=(models.Model,),
- managers=[
- ('objects', django.contrib.admin.models.LogEntryManager()),
- ],
- ),
- ]
diff --git a/thesisenv/lib/python3.6/site-packages/django/contrib/admin/migrations/0002_logentry_remove_auto_add.py b/thesisenv/lib/python3.6/site-packages/django/contrib/admin/migrations/0002_logentry_remove_auto_add.py
deleted file mode 100644
index a2b1916..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/contrib/admin/migrations/0002_logentry_remove_auto_add.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from django.db import migrations, models
-from django.utils import timezone
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('admin', '0001_initial'),
- ]
-
- # No database changes; removes auto_add and adds default/editable.
- operations = [
- migrations.AlterField(
- model_name='logentry',
- name='action_time',
- field=models.DateTimeField(
- verbose_name='action time',
- default=timezone.now,
- editable=False,
- ),
- ),
- ]
diff --git a/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0001_initial.py b/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0001_initial.py
deleted file mode 100644
index c1fbe9a..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0001_initial.py
+++ /dev/null
@@ -1,104 +0,0 @@
-import django.contrib.auth.models
-from django.contrib.auth import validators
-from django.db import migrations, models
-from django.utils import timezone
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('contenttypes', '__first__'),
- ]
-
- operations = [
- migrations.CreateModel(
- name='Permission',
- fields=[
- ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('name', models.CharField(max_length=50, verbose_name='name')),
- ('content_type', models.ForeignKey(
- to='contenttypes.ContentType',
- on_delete=models.CASCADE,
- to_field='id',
- verbose_name='content type',
- )),
- ('codename', models.CharField(max_length=100, verbose_name='codename')),
- ],
- options={
- 'ordering': ('content_type__app_label', 'content_type__model', 'codename'),
- 'unique_together': {('content_type', 'codename')},
- 'verbose_name': 'permission',
- 'verbose_name_plural': 'permissions',
- },
- managers=[
- ('objects', django.contrib.auth.models.PermissionManager()),
- ],
- ),
- migrations.CreateModel(
- name='Group',
- fields=[
- ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('name', models.CharField(unique=True, max_length=80, verbose_name='name')),
- ('permissions', models.ManyToManyField(to='auth.Permission', verbose_name='permissions', blank=True)),
- ],
- options={
- 'verbose_name': 'group',
- 'verbose_name_plural': 'groups',
- },
- managers=[
- ('objects', django.contrib.auth.models.GroupManager()),
- ],
- ),
- migrations.CreateModel(
- name='User',
- fields=[
- ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('password', models.CharField(max_length=128, verbose_name='password')),
- ('last_login', models.DateTimeField(default=timezone.now, verbose_name='last login')),
- ('is_superuser', models.BooleanField(
- default=False,
- help_text='Designates that this user has all permissions without explicitly assigning them.',
- verbose_name='superuser status'
- )),
- ('username', models.CharField(
- help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.', unique=True,
- max_length=30, verbose_name='username',
- validators=[validators.UnicodeUsernameValidator()],
- )),
- ('first_name', models.CharField(max_length=30, verbose_name='first name', blank=True)),
- ('last_name', models.CharField(max_length=30, verbose_name='last name', blank=True)),
- ('email', models.EmailField(max_length=75, verbose_name='email address', blank=True)),
- ('is_staff', models.BooleanField(
- default=False, help_text='Designates whether the user can log into this admin site.',
- verbose_name='staff status'
- )),
- ('is_active', models.BooleanField(
- default=True, verbose_name='active', help_text=(
- 'Designates whether this user should be treated as active. Unselect this instead of deleting '
- 'accounts.'
- )
- )),
- ('date_joined', models.DateTimeField(default=timezone.now, verbose_name='date joined')),
- ('groups', models.ManyToManyField(
- to='auth.Group', verbose_name='groups', blank=True, related_name='user_set',
- related_query_name='user', help_text=(
- 'The groups this user belongs to. A user will get all permissions granted to each of their '
- 'groups.'
- )
- )),
- ('user_permissions', models.ManyToManyField(
- to='auth.Permission', verbose_name='user permissions', blank=True,
- help_text='Specific permissions for this user.', related_name='user_set',
- related_query_name='user')
- ),
- ],
- options={
- 'swappable': 'AUTH_USER_MODEL',
- 'verbose_name': 'user',
- 'verbose_name_plural': 'users',
- },
- managers=[
- ('objects', django.contrib.auth.models.UserManager()),
- ],
- ),
- ]
diff --git a/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0002_alter_permission_name_max_length.py b/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0002_alter_permission_name_max_length.py
deleted file mode 100644
index 556c320..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0002_alter_permission_name_max_length.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('auth', '0001_initial'),
- ]
-
- operations = [
- migrations.AlterField(
- model_name='permission',
- name='name',
- field=models.CharField(max_length=255, verbose_name='name'),
- ),
- ]
diff --git a/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0003_alter_user_email_max_length.py b/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0003_alter_user_email_max_length.py
deleted file mode 100644
index ee8a9bd..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0003_alter_user_email_max_length.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('auth', '0002_alter_permission_name_max_length'),
- ]
-
- operations = [
- migrations.AlterField(
- model_name='user',
- name='email',
- field=models.EmailField(max_length=254, verbose_name='email address', blank=True),
- ),
- ]
diff --git a/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0004_alter_user_username_opts.py b/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0004_alter_user_username_opts.py
deleted file mode 100644
index a16083e..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0004_alter_user_username_opts.py
+++ /dev/null
@@ -1,23 +0,0 @@
-from django.contrib.auth import validators
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('auth', '0003_alter_user_email_max_length'),
- ]
-
- # No database changes; modifies validators and error_messages (#13147).
- operations = [
- migrations.AlterField(
- model_name='user',
- name='username',
- field=models.CharField(
- error_messages={'unique': 'A user with that username already exists.'}, max_length=30,
- validators=[validators.UnicodeUsernameValidator()],
- help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.',
- unique=True, verbose_name='username'
- ),
- ),
- ]
diff --git a/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0005_alter_user_last_login_null.py b/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0005_alter_user_last_login_null.py
deleted file mode 100644
index 97cd105..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0005_alter_user_last_login_null.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('auth', '0004_alter_user_username_opts'),
- ]
-
- operations = [
- migrations.AlterField(
- model_name='user',
- name='last_login',
- field=models.DateTimeField(null=True, verbose_name='last login', blank=True),
- ),
- ]
diff --git a/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0006_require_contenttypes_0002.py b/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0006_require_contenttypes_0002.py
deleted file mode 100644
index 48c26be..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0006_require_contenttypes_0002.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('auth', '0005_alter_user_last_login_null'),
- ('contenttypes', '0002_remove_content_type_name'),
- ]
-
- operations = [
- # Ensure the contenttypes migration is applied before sending
- # post_migrate signals (which create ContentTypes).
- ]
diff --git a/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0007_alter_validators_add_error_messages.py b/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0007_alter_validators_add_error_messages.py
deleted file mode 100644
index 42f5087..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0007_alter_validators_add_error_messages.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from django.contrib.auth import validators
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('auth', '0006_require_contenttypes_0002'),
- ]
-
- operations = [
- migrations.AlterField(
- model_name='user',
- name='username',
- field=models.CharField(
- error_messages={'unique': 'A user with that username already exists.'},
- help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.',
- max_length=30,
- unique=True,
- validators=[validators.UnicodeUsernameValidator()],
- verbose_name='username',
- ),
- ),
- ]
diff --git a/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0008_alter_user_username_max_length.py b/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0008_alter_user_username_max_length.py
deleted file mode 100644
index 7c9dae0..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0008_alter_user_username_max_length.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from django.contrib.auth import validators
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('auth', '0007_alter_validators_add_error_messages'),
- ]
-
- operations = [
- migrations.AlterField(
- model_name='user',
- name='username',
- field=models.CharField(
- error_messages={'unique': 'A user with that username already exists.'},
- help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.',
- max_length=150,
- unique=True,
- validators=[validators.UnicodeUsernameValidator()],
- verbose_name='username',
- ),
- ),
- ]
diff --git a/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0009_alter_user_last_name_max_length.py b/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0009_alter_user_last_name_max_length.py
deleted file mode 100644
index b217359..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/contrib/auth/migrations/0009_alter_user_last_name_max_length.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('auth', '0008_alter_user_username_max_length'),
- ]
-
- operations = [
- migrations.AlterField(
- model_name='user',
- name='last_name',
- field=models.CharField(blank=True, max_length=150, verbose_name='last name'),
- ),
- ]
diff --git a/thesisenv/lib/python3.6/site-packages/django/contrib/contenttypes/migrations/0001_initial.py b/thesisenv/lib/python3.6/site-packages/django/contrib/contenttypes/migrations/0001_initial.py
deleted file mode 100644
index e55c320..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/contrib/contenttypes/migrations/0001_initial.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import django.contrib.contenttypes.models
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ]
-
- operations = [
- migrations.CreateModel(
- name='ContentType',
- fields=[
- ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('name', models.CharField(max_length=100)),
- ('app_label', models.CharField(max_length=100)),
- ('model', models.CharField(max_length=100, verbose_name='python model class name')),
- ],
- options={
- 'ordering': ('name',),
- 'db_table': 'django_content_type',
- 'verbose_name': 'content type',
- 'verbose_name_plural': 'content types',
- },
- bases=(models.Model,),
- managers=[
- ('objects', django.contrib.contenttypes.models.ContentTypeManager()),
- ],
- ),
- migrations.AlterUniqueTogether(
- name='contenttype',
- unique_together={('app_label', 'model')},
- ),
- ]
diff --git a/thesisenv/lib/python3.6/site-packages/django/contrib/contenttypes/migrations/0002_remove_content_type_name.py b/thesisenv/lib/python3.6/site-packages/django/contrib/contenttypes/migrations/0002_remove_content_type_name.py
deleted file mode 100644
index c88e603..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/contrib/contenttypes/migrations/0002_remove_content_type_name.py
+++ /dev/null
@@ -1,39 +0,0 @@
-from django.db import migrations, models
-
-
-def add_legacy_name(apps, schema_editor):
- ContentType = apps.get_model('contenttypes', 'ContentType')
- for ct in ContentType.objects.all():
- try:
- ct.name = apps.get_model(ct.app_label, ct.model)._meta.object_name
- except LookupError:
- ct.name = ct.model
- ct.save()
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('contenttypes', '0001_initial'),
- ]
-
- operations = [
- migrations.AlterModelOptions(
- name='contenttype',
- options={'verbose_name': 'content type', 'verbose_name_plural': 'content types'},
- ),
- migrations.AlterField(
- model_name='contenttype',
- name='name',
- field=models.CharField(max_length=100, null=True),
- ),
- migrations.RunPython(
- migrations.RunPython.noop,
- add_legacy_name,
- hints={'model_name': 'contenttype'},
- ),
- migrations.RemoveField(
- model_name='contenttype',
- name='name',
- ),
- ]
diff --git a/thesisenv/lib/python3.6/site-packages/django/contrib/flatpages/migrations/0001_initial.py b/thesisenv/lib/python3.6/site-packages/django/contrib/flatpages/migrations/0001_initial.py
deleted file mode 100644
index b385ca5..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/contrib/flatpages/migrations/0001_initial.py
+++ /dev/null
@@ -1,39 +0,0 @@
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('sites', '0001_initial'),
- ]
-
- operations = [
- migrations.CreateModel(
- name='FlatPage',
- fields=[
- ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('url', models.CharField(max_length=100, verbose_name='URL', db_index=True)),
- ('title', models.CharField(max_length=200, verbose_name='title')),
- ('content', models.TextField(verbose_name='content', blank=True)),
- ('enable_comments', models.BooleanField(default=False, verbose_name='enable comments')),
- ('template_name', models.CharField(
- help_text=(
- "Example: 'flatpages/contact_page.html'. If this isn't provided, the system will use "
- "'flatpages/default.html'."
- ), max_length=70, verbose_name='template name', blank=True
- )),
- ('registration_required', models.BooleanField(
- default=False, help_text='If this is checked, only logged-in users will be able to view the page.',
- verbose_name='registration required'
- )),
- ('sites', models.ManyToManyField(to='sites.Site', verbose_name='sites')),
- ],
- options={
- 'ordering': ('url',),
- 'db_table': 'django_flatpage',
- 'verbose_name': 'flat page',
- 'verbose_name_plural': 'flat pages',
- },
- bases=(models.Model,),
- ),
- ]
diff --git a/thesisenv/lib/python3.6/site-packages/django/contrib/redirects/migrations/0001_initial.py b/thesisenv/lib/python3.6/site-packages/django/contrib/redirects/migrations/0001_initial.py
deleted file mode 100644
index 5acf8c9..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/contrib/redirects/migrations/0001_initial.py
+++ /dev/null
@@ -1,40 +0,0 @@
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('sites', '0001_initial'),
- ]
-
- operations = [
- migrations.CreateModel(
- name='Redirect',
- fields=[
- ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('site', models.ForeignKey(
- to='sites.Site',
- to_field='id',
- on_delete=models.CASCADE,
- verbose_name='site',
- )),
- ('old_path', models.CharField(
- help_text=(
- "This should be an absolute path, excluding the domain name. Example: '/events/search/'."
- ), max_length=200, verbose_name='redirect from', db_index=True
- )),
- ('new_path', models.CharField(
- help_text="This can be either an absolute path (as above) or a full URL starting with 'http://'.",
- max_length=200, verbose_name='redirect to', blank=True
- )),
- ],
- options={
- 'ordering': ('old_path',),
- 'unique_together': {('site', 'old_path')},
- 'db_table': 'django_redirect',
- 'verbose_name': 'redirect',
- 'verbose_name_plural': 'redirects',
- },
- bases=(models.Model,),
- ),
- ]
diff --git a/thesisenv/lib/python3.6/site-packages/django/contrib/sessions/migrations/0001_initial.py b/thesisenv/lib/python3.6/site-packages/django/contrib/sessions/migrations/0001_initial.py
deleted file mode 100644
index 39eaa6d..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/contrib/sessions/migrations/0001_initial.py
+++ /dev/null
@@ -1,30 +0,0 @@
-import django.contrib.sessions.models
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ]
-
- operations = [
- migrations.CreateModel(
- name='Session',
- fields=[
- ('session_key', models.CharField(
- max_length=40, serialize=False, verbose_name='session key', primary_key=True
- )),
- ('session_data', models.TextField(verbose_name='session data')),
- ('expire_date', models.DateTimeField(verbose_name='expire date', db_index=True)),
- ],
- options={
- 'abstract': False,
- 'db_table': 'django_session',
- 'verbose_name': 'session',
- 'verbose_name_plural': 'sessions',
- },
- managers=[
- ('objects', django.contrib.sessions.models.SessionManager()),
- ],
- ),
- ]
diff --git a/thesisenv/lib/python3.6/site-packages/django/contrib/sites/migrations/0001_initial.py b/thesisenv/lib/python3.6/site-packages/django/contrib/sites/migrations/0001_initial.py
deleted file mode 100644
index a763986..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/contrib/sites/migrations/0001_initial.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import django.contrib.sites.models
-from django.contrib.sites.models import _simple_domain_name_validator
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- dependencies = []
-
- operations = [
- migrations.CreateModel(
- name='Site',
- fields=[
- ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('domain', models.CharField(
- max_length=100, verbose_name='domain name', validators=[_simple_domain_name_validator]
- )),
- ('name', models.CharField(max_length=50, verbose_name='display name')),
- ],
- options={
- 'ordering': ('domain',),
- 'db_table': 'django_site',
- 'verbose_name': 'site',
- 'verbose_name_plural': 'sites',
- },
- bases=(models.Model,),
- managers=[
- ('objects', django.contrib.sites.models.SiteManager()),
- ],
- ),
- ]
diff --git a/thesisenv/lib/python3.6/site-packages/django/contrib/sites/migrations/0002_alter_domain_unique.py b/thesisenv/lib/python3.6/site-packages/django/contrib/sites/migrations/0002_alter_domain_unique.py
deleted file mode 100644
index 6a26ebc..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/contrib/sites/migrations/0002_alter_domain_unique.py
+++ /dev/null
@@ -1,20 +0,0 @@
-import django.contrib.sites.models
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('sites', '0001_initial'),
- ]
-
- operations = [
- migrations.AlterField(
- model_name='site',
- name='domain',
- field=models.CharField(
- max_length=100, unique=True, validators=[django.contrib.sites.models._simple_domain_name_validator],
- verbose_name='domain name'
- ),
- ),
- ]
diff --git a/thesisenv/lib/python3.6/site-packages/django/db/migrations/autodetector.py b/thesisenv/lib/python3.6/site-packages/django/db/migrations/autodetector.py
deleted file mode 100644
index ece58b9..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/db/migrations/autodetector.py
+++ /dev/null
@@ -1,1236 +0,0 @@
-import functools
-import re
-from itertools import chain
-
-from django.conf import settings
-from django.db import models
-from django.db.migrations import operations
-from django.db.migrations.migration import Migration
-from django.db.migrations.operations.models import AlterModelOptions
-from django.db.migrations.optimizer import MigrationOptimizer
-from django.db.migrations.questioner import MigrationQuestioner
-from django.db.migrations.utils import (
- COMPILED_REGEX_TYPE, RegexObject, get_migration_name_timestamp,
-)
-
-from .topological_sort import stable_topological_sort
-
-
-class MigrationAutodetector:
- """
- Take a pair of ProjectStates and compare them to see what the first would
- need doing to make it match the second (the second usually being the
- project's current state).
-
- Note that this naturally operates on entire projects at a time,
- as it's likely that changes interact (for example, you can't
- add a ForeignKey without having a migration to add the table it
- depends on first). A user interface may offer single-app usage
- if it wishes, with the caveat that it may not always be possible.
- """
-
- def __init__(self, from_state, to_state, questioner=None):
- self.from_state = from_state
- self.to_state = to_state
- self.questioner = questioner or MigrationQuestioner()
- self.existing_apps = {app for app, model in from_state.models}
-
- def changes(self, graph, trim_to_apps=None, convert_apps=None, migration_name=None):
- """
- Main entry point to produce a list of applicable changes.
- Take a graph to base names on and an optional set of apps
- to try and restrict to (restriction is not guaranteed)
- """
- changes = self._detect_changes(convert_apps, graph)
- changes = self.arrange_for_graph(changes, graph, migration_name)
- if trim_to_apps:
- changes = self._trim_to_apps(changes, trim_to_apps)
- return changes
-
- def deep_deconstruct(self, obj):
- """
- Recursive deconstruction for a field and its arguments.
- Used for full comparison for rename/alter; sometimes a single-level
- deconstruction will not compare correctly.
- """
- if isinstance(obj, list):
- return [self.deep_deconstruct(value) for value in obj]
- elif isinstance(obj, tuple):
- return tuple(self.deep_deconstruct(value) for value in obj)
- elif isinstance(obj, dict):
- return {
- key: self.deep_deconstruct(value)
- for key, value in obj.items()
- }
- elif isinstance(obj, functools.partial):
- return (obj.func, self.deep_deconstruct(obj.args), self.deep_deconstruct(obj.keywords))
- elif isinstance(obj, COMPILED_REGEX_TYPE):
- return RegexObject(obj)
- elif isinstance(obj, type):
- # If this is a type that implements 'deconstruct' as an instance method,
- # avoid treating this as being deconstructible itself - see #22951
- return obj
- elif hasattr(obj, 'deconstruct'):
- deconstructed = obj.deconstruct()
- if isinstance(obj, models.Field):
- # we have a field which also returns a name
- deconstructed = deconstructed[1:]
- path, args, kwargs = deconstructed
- return (
- path,
- [self.deep_deconstruct(value) for value in args],
- {
- key: self.deep_deconstruct(value)
- for key, value in kwargs.items()
- },
- )
- else:
- return obj
-
- def only_relation_agnostic_fields(self, fields):
- """
- Return a definition of the fields that ignores field names and
- what related fields actually relate to. Used for detecting renames (as,
- of course, the related fields change during renames).
- """
- fields_def = []
- for name, field in sorted(fields):
- deconstruction = self.deep_deconstruct(field)
- if field.remote_field and field.remote_field.model:
- del deconstruction[2]['to']
- fields_def.append(deconstruction)
- return fields_def
-
- def _detect_changes(self, convert_apps=None, graph=None):
- """
- Return a dict of migration plans which will achieve the
- change from from_state to to_state. The dict has app labels
- as keys and a list of migrations as values.
-
- The resulting migrations aren't specially named, but the names
- do matter for dependencies inside the set.
-
- convert_apps is the list of apps to convert to use migrations
- (i.e. to make initial migrations for, in the usual case)
-
- graph is an optional argument that, if provided, can help improve
- dependency generation and avoid potential circular dependencies.
- """
- # The first phase is generating all the operations for each app
- # and gathering them into a big per-app list.
- # Then go through that list, order it, and split into migrations to
- # resolve dependencies caused by M2Ms and FKs.
- self.generated_operations = {}
- self.altered_indexes = {}
-
- # Prepare some old/new state and model lists, separating
- # proxy models and ignoring unmigrated apps.
- self.old_apps = self.from_state.concrete_apps
- self.new_apps = self.to_state.apps
- self.old_model_keys = set()
- self.old_proxy_keys = set()
- self.old_unmanaged_keys = set()
- self.new_model_keys = set()
- self.new_proxy_keys = set()
- self.new_unmanaged_keys = set()
- for al, mn in self.from_state.models:
- model = self.old_apps.get_model(al, mn)
- if not model._meta.managed:
- self.old_unmanaged_keys.add((al, mn))
- elif al not in self.from_state.real_apps:
- if model._meta.proxy:
- self.old_proxy_keys.add((al, mn))
- else:
- self.old_model_keys.add((al, mn))
-
- for al, mn in self.to_state.models:
- model = self.new_apps.get_model(al, mn)
- if not model._meta.managed:
- self.new_unmanaged_keys.add((al, mn))
- elif (
- al not in self.from_state.real_apps or
- (convert_apps and al in convert_apps)
- ):
- if model._meta.proxy:
- self.new_proxy_keys.add((al, mn))
- else:
- self.new_model_keys.add((al, mn))
-
- # Renames have to come first
- self.generate_renamed_models()
-
- # Prepare lists of fields and generate through model map
- self._prepare_field_lists()
- self._generate_through_model_map()
-
- # Generate non-rename model operations
- self.generate_deleted_models()
- self.generate_created_models()
- self.generate_deleted_proxies()
- self.generate_created_proxies()
- self.generate_altered_options()
- self.generate_altered_managers()
-
- # Create the altered indexes and store them in self.altered_indexes.
- # This avoids the same computation in generate_removed_indexes()
- # and generate_added_indexes().
- self.create_altered_indexes()
- # Generate index removal operations before field is removed
- self.generate_removed_indexes()
- # Generate field operations
- self.generate_renamed_fields()
- self.generate_removed_fields()
- self.generate_added_fields()
- self.generate_altered_fields()
- self.generate_altered_unique_together()
- self.generate_altered_index_together()
- self.generate_added_indexes()
- self.generate_altered_db_table()
- self.generate_altered_order_with_respect_to()
-
- self._sort_migrations()
- self._build_migration_list(graph)
- self._optimize_migrations()
-
- return self.migrations
-
- def _prepare_field_lists(self):
- """
- Prepare field lists and a list of the fields that used through models
- in the old state so dependencies can be made from the through model
- deletion to the field that uses it.
- """
- self.kept_model_keys = self.old_model_keys & self.new_model_keys
- self.kept_proxy_keys = self.old_proxy_keys & self.new_proxy_keys
- self.kept_unmanaged_keys = self.old_unmanaged_keys & self.new_unmanaged_keys
- self.through_users = {}
- self.old_field_keys = {
- (app_label, model_name, x)
- for app_label, model_name in self.kept_model_keys
- for x, y in self.from_state.models[
- app_label,
- self.renamed_models.get((app_label, model_name), model_name)
- ].fields
- }
- self.new_field_keys = {
- (app_label, model_name, x)
- for app_label, model_name in self.kept_model_keys
- for x, y in self.to_state.models[app_label, model_name].fields
- }
-
- def _generate_through_model_map(self):
- """Through model map generation."""
- for app_label, model_name in sorted(self.old_model_keys):
- old_model_name = self.renamed_models.get((app_label, model_name), model_name)
- old_model_state = self.from_state.models[app_label, old_model_name]
- for field_name, field in old_model_state.fields:
- old_field = self.old_apps.get_model(app_label, old_model_name)._meta.get_field(field_name)
- if (hasattr(old_field, "remote_field") and getattr(old_field.remote_field, "through", None) and
- not old_field.remote_field.through._meta.auto_created):
- through_key = (
- old_field.remote_field.through._meta.app_label,
- old_field.remote_field.through._meta.model_name,
- )
- self.through_users[through_key] = (app_label, old_model_name, field_name)
-
- def _build_migration_list(self, graph=None):
- """
- Chop the lists of operations up into migrations with dependencies on
- each other. Do this by going through an app's list of operations until
- one is found that has an outgoing dependency that isn't in another
- app's migration yet (hasn't been chopped off its list). Then chop off
- the operations before it into a migration and move onto the next app.
- If the loops completes without doing anything, there's a circular
- dependency (which _should_ be impossible as the operations are
- all split at this point so they can't depend and be depended on).
- """
- self.migrations = {}
- num_ops = sum(len(x) for x in self.generated_operations.values())
- chop_mode = False
- while num_ops:
- # On every iteration, we step through all the apps and see if there
- # is a completed set of operations.
- # If we find that a subset of the operations are complete we can
- # try to chop it off from the rest and continue, but we only
- # do this if we've already been through the list once before
- # without any chopping and nothing has changed.
- for app_label in sorted(self.generated_operations):
- chopped = []
- dependencies = set()
- for operation in list(self.generated_operations[app_label]):
- deps_satisfied = True
- operation_dependencies = set()
- for dep in operation._auto_deps:
- is_swappable_dep = False
- if dep[0] == "__setting__":
- # We need to temporarily resolve the swappable dependency to prevent
- # circular references. While keeping the dependency checks on the
- # resolved model we still add the swappable dependencies.
- # See #23322
- resolved_app_label, resolved_object_name = getattr(settings, dep[1]).split('.')
- original_dep = dep
- dep = (resolved_app_label, resolved_object_name.lower(), dep[2], dep[3])
- is_swappable_dep = True
- if dep[0] != app_label and dep[0] != "__setting__":
- # External app dependency. See if it's not yet
- # satisfied.
- for other_operation in self.generated_operations.get(dep[0], []):
- if self.check_dependency(other_operation, dep):
- deps_satisfied = False
- break
- if not deps_satisfied:
- break
- else:
- if is_swappable_dep:
- operation_dependencies.add((original_dep[0], original_dep[1]))
- elif dep[0] in self.migrations:
- operation_dependencies.add((dep[0], self.migrations[dep[0]][-1].name))
- else:
- # If we can't find the other app, we add a first/last dependency,
- # but only if we've already been through once and checked everything
- if chop_mode:
- # If the app already exists, we add a dependency on the last migration,
- # as we don't know which migration contains the target field.
- # If it's not yet migrated or has no migrations, we use __first__
- if graph and graph.leaf_nodes(dep[0]):
- operation_dependencies.add(graph.leaf_nodes(dep[0])[0])
- else:
- operation_dependencies.add((dep[0], "__first__"))
- else:
- deps_satisfied = False
- if deps_satisfied:
- chopped.append(operation)
- dependencies.update(operation_dependencies)
- self.generated_operations[app_label] = self.generated_operations[app_label][1:]
- else:
- break
- # Make a migration! Well, only if there's stuff to put in it
- if dependencies or chopped:
- if not self.generated_operations[app_label] or chop_mode:
- subclass = type("Migration", (Migration,), {"operations": [], "dependencies": []})
- instance = subclass("auto_%i" % (len(self.migrations.get(app_label, [])) + 1), app_label)
- instance.dependencies = list(dependencies)
- instance.operations = chopped
- instance.initial = app_label not in self.existing_apps
- self.migrations.setdefault(app_label, []).append(instance)
- chop_mode = False
- else:
- self.generated_operations[app_label] = chopped + self.generated_operations[app_label]
- new_num_ops = sum(len(x) for x in self.generated_operations.values())
- if new_num_ops == num_ops:
- if not chop_mode:
- chop_mode = True
- else:
- raise ValueError("Cannot resolve operation dependencies: %r" % self.generated_operations)
- num_ops = new_num_ops
-
- def _sort_migrations(self):
- """
- Reorder to make things possible. Reordering may be needed so FKs work
- nicely inside the same app.
- """
- for app_label, ops in sorted(self.generated_operations.items()):
- # construct a dependency graph for intra-app dependencies
- dependency_graph = {op: set() for op in ops}
- for op in ops:
- for dep in op._auto_deps:
- if dep[0] == app_label:
- for op2 in ops:
- if self.check_dependency(op2, dep):
- dependency_graph[op].add(op2)
-
- # we use a stable sort for deterministic tests & general behavior
- self.generated_operations[app_label] = stable_topological_sort(ops, dependency_graph)
-
- def _optimize_migrations(self):
- # Add in internal dependencies among the migrations
- for app_label, migrations in self.migrations.items():
- for m1, m2 in zip(migrations, migrations[1:]):
- m2.dependencies.append((app_label, m1.name))
-
- # De-dupe dependencies
- for app_label, migrations in self.migrations.items():
- for migration in migrations:
- migration.dependencies = list(set(migration.dependencies))
-
- # Optimize migrations
- for app_label, migrations in self.migrations.items():
- for migration in migrations:
- migration.operations = MigrationOptimizer().optimize(migration.operations, app_label=app_label)
-
- def check_dependency(self, operation, dependency):
- """
- Return True if the given operation depends on the given dependency,
- False otherwise.
- """
- # Created model
- if dependency[2] is None and dependency[3] is True:
- return (
- isinstance(operation, operations.CreateModel) and
- operation.name_lower == dependency[1].lower()
- )
- # Created field
- elif dependency[2] is not None and dependency[3] is True:
- return (
- (
- isinstance(operation, operations.CreateModel) and
- operation.name_lower == dependency[1].lower() and
- any(dependency[2] == x for x, y in operation.fields)
- ) or
- (
- isinstance(operation, operations.AddField) and
- operation.model_name_lower == dependency[1].lower() and
- operation.name_lower == dependency[2].lower()
- )
- )
- # Removed field
- elif dependency[2] is not None and dependency[3] is False:
- return (
- isinstance(operation, operations.RemoveField) and
- operation.model_name_lower == dependency[1].lower() and
- operation.name_lower == dependency[2].lower()
- )
- # Removed model
- elif dependency[2] is None and dependency[3] is False:
- return (
- isinstance(operation, operations.DeleteModel) and
- operation.name_lower == dependency[1].lower()
- )
- # Field being altered
- elif dependency[2] is not None and dependency[3] == "alter":
- return (
- isinstance(operation, operations.AlterField) and
- operation.model_name_lower == dependency[1].lower() and
- operation.name_lower == dependency[2].lower()
- )
- # order_with_respect_to being unset for a field
- elif dependency[2] is not None and dependency[3] == "order_wrt_unset":
- return (
- isinstance(operation, operations.AlterOrderWithRespectTo) and
- operation.name_lower == dependency[1].lower() and
- (operation.order_with_respect_to or "").lower() != dependency[2].lower()
- )
- # Field is removed and part of an index/unique_together
- elif dependency[2] is not None and dependency[3] == "foo_together_change":
- return (
- isinstance(operation, (operations.AlterUniqueTogether,
- operations.AlterIndexTogether)) and
- operation.name_lower == dependency[1].lower()
- )
- # Unknown dependency. Raise an error.
- else:
- raise ValueError("Can't handle dependency %r" % (dependency, ))
-
- def add_operation(self, app_label, operation, dependencies=None, beginning=False):
- # Dependencies are (app_label, model_name, field_name, create/delete as True/False)
- operation._auto_deps = dependencies or []
- if beginning:
- self.generated_operations.setdefault(app_label, []).insert(0, operation)
- else:
- self.generated_operations.setdefault(app_label, []).append(operation)
-
- def swappable_first_key(self, item):
- """
- Place potential swappable models first in lists of created models (only
- real way to solve #22783).
- """
- try:
- model = self.new_apps.get_model(item[0], item[1])
- base_names = [base.__name__ for base in model.__bases__]
- string_version = "%s.%s" % (item[0], item[1])
- if (
- model._meta.swappable or
- "AbstractUser" in base_names or
- "AbstractBaseUser" in base_names or
- settings.AUTH_USER_MODEL.lower() == string_version.lower()
- ):
- return ("___" + item[0], "___" + item[1])
- except LookupError:
- pass
- return item
-
- def generate_renamed_models(self):
- """
- Find any renamed models, generate the operations for them, and remove
- the old entry from the model lists. Must be run before other
- model-level generation.
- """
- self.renamed_models = {}
- self.renamed_models_rel = {}
- added_models = self.new_model_keys - self.old_model_keys
- for app_label, model_name in sorted(added_models):
- model_state = self.to_state.models[app_label, model_name]
- model_fields_def = self.only_relation_agnostic_fields(model_state.fields)
-
- removed_models = self.old_model_keys - self.new_model_keys
- for rem_app_label, rem_model_name in removed_models:
- if rem_app_label == app_label:
- rem_model_state = self.from_state.models[rem_app_label, rem_model_name]
- rem_model_fields_def = self.only_relation_agnostic_fields(rem_model_state.fields)
- if model_fields_def == rem_model_fields_def:
- if self.questioner.ask_rename_model(rem_model_state, model_state):
- model_opts = self.new_apps.get_model(app_label, model_name)._meta
- dependencies = []
- for field in model_opts.get_fields():
- if field.is_relation:
- dependencies.extend(self._get_dependencies_for_foreign_key(field))
- self.add_operation(
- app_label,
- operations.RenameModel(
- old_name=rem_model_state.name,
- new_name=model_state.name,
- ),
- dependencies=dependencies,
- )
- self.renamed_models[app_label, model_name] = rem_model_name
- renamed_models_rel_key = '%s.%s' % (rem_model_state.app_label, rem_model_state.name)
- self.renamed_models_rel[renamed_models_rel_key] = '%s.%s' % (
- model_state.app_label,
- model_state.name,
- )
- self.old_model_keys.remove((rem_app_label, rem_model_name))
- self.old_model_keys.add((app_label, model_name))
- break
-
- def generate_created_models(self):
- """
- Find all new models (both managed and unmanaged) and make create
- operations for them as well as separate operations to create any
- foreign key or M2M relationships (these are optimized later, if
- possible).
-
- Defer any model options that refer to collections of fields that might
- be deferred (e.g. unique_together, index_together).
- """
- old_keys = self.old_model_keys | self.old_unmanaged_keys
- added_models = self.new_model_keys - old_keys
- added_unmanaged_models = self.new_unmanaged_keys - old_keys
- all_added_models = chain(
- sorted(added_models, key=self.swappable_first_key, reverse=True),
- sorted(added_unmanaged_models, key=self.swappable_first_key, reverse=True)
- )
- for app_label, model_name in all_added_models:
- model_state = self.to_state.models[app_label, model_name]
- model_opts = self.new_apps.get_model(app_label, model_name)._meta
- # Gather related fields
- related_fields = {}
- primary_key_rel = None
- for field in model_opts.local_fields:
- if field.remote_field:
- if field.remote_field.model:
- if field.primary_key:
- primary_key_rel = field.remote_field.model
- elif not field.remote_field.parent_link:
- related_fields[field.name] = field
- # through will be none on M2Ms on swapped-out models;
- # we can treat lack of through as auto_created=True, though.
- if (getattr(field.remote_field, "through", None) and
- not field.remote_field.through._meta.auto_created):
- related_fields[field.name] = field
- for field in model_opts.local_many_to_many:
- if field.remote_field.model:
- related_fields[field.name] = field
- if getattr(field.remote_field, "through", None) and not field.remote_field.through._meta.auto_created:
- related_fields[field.name] = field
- # Are there indexes/unique|index_together to defer?
- indexes = model_state.options.pop('indexes')
- unique_together = model_state.options.pop('unique_together', None)
- index_together = model_state.options.pop('index_together', None)
- order_with_respect_to = model_state.options.pop('order_with_respect_to', None)
- # Depend on the deletion of any possible proxy version of us
- dependencies = [
- (app_label, model_name, None, False),
- ]
- # Depend on all bases
- for base in model_state.bases:
- if isinstance(base, str) and "." in base:
- base_app_label, base_name = base.split(".", 1)
- dependencies.append((base_app_label, base_name, None, True))
- # Depend on the other end of the primary key if it's a relation
- if primary_key_rel:
- dependencies.append((
- primary_key_rel._meta.app_label,
- primary_key_rel._meta.object_name,
- None,
- True
- ))
- # Generate creation operation
- self.add_operation(
- app_label,
- operations.CreateModel(
- name=model_state.name,
- fields=[d for d in model_state.fields if d[0] not in related_fields],
- options=model_state.options,
- bases=model_state.bases,
- managers=model_state.managers,
- ),
- dependencies=dependencies,
- beginning=True,
- )
-
- # Don't add operations which modify the database for unmanaged models
- if not model_opts.managed:
- continue
-
- # Generate operations for each related field
- for name, field in sorted(related_fields.items()):
- dependencies = self._get_dependencies_for_foreign_key(field)
- # Depend on our own model being created
- dependencies.append((app_label, model_name, None, True))
- # Make operation
- self.add_operation(
- app_label,
- operations.AddField(
- model_name=model_name,
- name=name,
- field=field,
- ),
- dependencies=list(set(dependencies)),
- )
- # Generate other opns
- related_dependencies = [
- (app_label, model_name, name, True)
- for name, field in sorted(related_fields.items())
- ]
- related_dependencies.append((app_label, model_name, None, True))
- for index in indexes:
- self.add_operation(
- app_label,
- operations.AddIndex(
- model_name=model_name,
- index=index,
- ),
- dependencies=related_dependencies,
- )
- if unique_together:
- self.add_operation(
- app_label,
- operations.AlterUniqueTogether(
- name=model_name,
- unique_together=unique_together,
- ),
- dependencies=related_dependencies
- )
- if index_together:
- self.add_operation(
- app_label,
- operations.AlterIndexTogether(
- name=model_name,
- index_together=index_together,
- ),
- dependencies=related_dependencies
- )
- if order_with_respect_to:
- self.add_operation(
- app_label,
- operations.AlterOrderWithRespectTo(
- name=model_name,
- order_with_respect_to=order_with_respect_to,
- ),
- dependencies=[
- (app_label, model_name, order_with_respect_to, True),
- (app_label, model_name, None, True),
- ]
- )
-
- # Fix relationships if the model changed from a proxy model to a
- # concrete model.
- if (app_label, model_name) in self.old_proxy_keys:
- for related_object in model_opts.related_objects:
- self.add_operation(
- related_object.related_model._meta.app_label,
- operations.AlterField(
- model_name=related_object.related_model._meta.object_name,
- name=related_object.field.name,
- field=related_object.field,
- ),
- dependencies=[(app_label, model_name, None, True)],
- )
-
- def generate_created_proxies(self):
- """
- Make CreateModel statements for proxy models. Use the same statements
- as that way there's less code duplication, but of course for proxy
- models it's safe to skip all the pointless field stuff and just chuck
- out an operation.
- """
- added = self.new_proxy_keys - self.old_proxy_keys
- for app_label, model_name in sorted(added):
- model_state = self.to_state.models[app_label, model_name]
- assert model_state.options.get("proxy")
- # Depend on the deletion of any possible non-proxy version of us
- dependencies = [
- (app_label, model_name, None, False),
- ]
- # Depend on all bases
- for base in model_state.bases:
- if isinstance(base, str) and "." in base:
- base_app_label, base_name = base.split(".", 1)
- dependencies.append((base_app_label, base_name, None, True))
- # Generate creation operation
- self.add_operation(
- app_label,
- operations.CreateModel(
- name=model_state.name,
- fields=[],
- options=model_state.options,
- bases=model_state.bases,
- managers=model_state.managers,
- ),
- # Depend on the deletion of any possible non-proxy version of us
- dependencies=dependencies,
- )
-
- def generate_deleted_models(self):
- """
- Find all deleted models (managed and unmanaged) and make delete
- operations for them as well as separate operations to delete any
- foreign key or M2M relationships (these are optimized later, if
- possible).
-
- Also bring forward removal of any model options that refer to
- collections of fields - the inverse of generate_created_models().
- """
- new_keys = self.new_model_keys | self.new_unmanaged_keys
- deleted_models = self.old_model_keys - new_keys
- deleted_unmanaged_models = self.old_unmanaged_keys - new_keys
- all_deleted_models = chain(sorted(deleted_models), sorted(deleted_unmanaged_models))
- for app_label, model_name in all_deleted_models:
- model_state = self.from_state.models[app_label, model_name]
- model = self.old_apps.get_model(app_label, model_name)
- if not model._meta.managed:
- # Skip here, no need to handle fields for unmanaged models
- continue
-
- # Gather related fields
- related_fields = {}
- for field in model._meta.local_fields:
- if field.remote_field:
- if field.remote_field.model:
- related_fields[field.name] = field
- # through will be none on M2Ms on swapped-out models;
- # we can treat lack of through as auto_created=True, though.
- if (getattr(field.remote_field, "through", None) and
- not field.remote_field.through._meta.auto_created):
- related_fields[field.name] = field
- for field in model._meta.local_many_to_many:
- if field.remote_field.model:
- related_fields[field.name] = field
- if getattr(field.remote_field, "through", None) and not field.remote_field.through._meta.auto_created:
- related_fields[field.name] = field
- # Generate option removal first
- unique_together = model_state.options.pop('unique_together', None)
- index_together = model_state.options.pop('index_together', None)
- if unique_together:
- self.add_operation(
- app_label,
- operations.AlterUniqueTogether(
- name=model_name,
- unique_together=None,
- )
- )
- if index_together:
- self.add_operation(
- app_label,
- operations.AlterIndexTogether(
- name=model_name,
- index_together=None,
- )
- )
- # Then remove each related field
- for name, field in sorted(related_fields.items()):
- self.add_operation(
- app_label,
- operations.RemoveField(
- model_name=model_name,
- name=name,
- )
- )
- # Finally, remove the model.
- # This depends on both the removal/alteration of all incoming fields
- # and the removal of all its own related fields, and if it's
- # a through model the field that references it.
- dependencies = []
- for related_object in model._meta.related_objects:
- related_object_app_label = related_object.related_model._meta.app_label
- object_name = related_object.related_model._meta.object_name
- field_name = related_object.field.name
- dependencies.append((related_object_app_label, object_name, field_name, False))
- if not related_object.many_to_many:
- dependencies.append((related_object_app_label, object_name, field_name, "alter"))
-
- for name, field in sorted(related_fields.items()):
- dependencies.append((app_label, model_name, name, False))
- # We're referenced in another field's through=
- through_user = self.through_users.get((app_label, model_state.name_lower))
- if through_user:
- dependencies.append((through_user[0], through_user[1], through_user[2], False))
- # Finally, make the operation, deduping any dependencies
- self.add_operation(
- app_label,
- operations.DeleteModel(
- name=model_state.name,
- ),
- dependencies=list(set(dependencies)),
- )
-
- def generate_deleted_proxies(self):
- """Make DeleteModel options for proxy models."""
- deleted = self.old_proxy_keys - self.new_proxy_keys
- for app_label, model_name in sorted(deleted):
- model_state = self.from_state.models[app_label, model_name]
- assert model_state.options.get("proxy")
- self.add_operation(
- app_label,
- operations.DeleteModel(
- name=model_state.name,
- ),
- )
-
- def generate_renamed_fields(self):
- """Work out renamed fields."""
- self.renamed_fields = {}
- for app_label, model_name, field_name in sorted(self.new_field_keys - self.old_field_keys):
- old_model_name = self.renamed_models.get((app_label, model_name), model_name)
- old_model_state = self.from_state.models[app_label, old_model_name]
- field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name)
- # Scan to see if this is actually a rename!
- field_dec = self.deep_deconstruct(field)
- for rem_app_label, rem_model_name, rem_field_name in sorted(self.old_field_keys - self.new_field_keys):
- if rem_app_label == app_label and rem_model_name == model_name:
- old_field_dec = self.deep_deconstruct(old_model_state.get_field_by_name(rem_field_name))
- if field.remote_field and field.remote_field.model and 'to' in old_field_dec[2]:
- old_rel_to = old_field_dec[2]['to']
- if old_rel_to in self.renamed_models_rel:
- old_field_dec[2]['to'] = self.renamed_models_rel[old_rel_to]
- if old_field_dec == field_dec:
- if self.questioner.ask_rename(model_name, rem_field_name, field_name, field):
- self.add_operation(
- app_label,
- operations.RenameField(
- model_name=model_name,
- old_name=rem_field_name,
- new_name=field_name,
- )
- )
- self.old_field_keys.remove((rem_app_label, rem_model_name, rem_field_name))
- self.old_field_keys.add((app_label, model_name, field_name))
- self.renamed_fields[app_label, model_name, field_name] = rem_field_name
- break
-
- def generate_added_fields(self):
- """Make AddField operations."""
- for app_label, model_name, field_name in sorted(self.new_field_keys - self.old_field_keys):
- self._generate_added_field(app_label, model_name, field_name)
-
- def _generate_added_field(self, app_label, model_name, field_name):
- field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name)
- # Fields that are foreignkeys/m2ms depend on stuff
- dependencies = []
- if field.remote_field and field.remote_field.model:
- dependencies.extend(self._get_dependencies_for_foreign_key(field))
- # You can't just add NOT NULL fields with no default or fields
- # which don't allow empty strings as default.
- preserve_default = True
- time_fields = (models.DateField, models.DateTimeField, models.TimeField)
- if (not field.null and not field.has_default() and
- not field.many_to_many and
- not (field.blank and field.empty_strings_allowed) and
- not (isinstance(field, time_fields) and field.auto_now)):
- field = field.clone()
- if isinstance(field, time_fields) and field.auto_now_add:
- field.default = self.questioner.ask_auto_now_add_addition(field_name, model_name)
- else:
- field.default = self.questioner.ask_not_null_addition(field_name, model_name)
- preserve_default = False
- self.add_operation(
- app_label,
- operations.AddField(
- model_name=model_name,
- name=field_name,
- field=field,
- preserve_default=preserve_default,
- ),
- dependencies=dependencies,
- )
-
- def generate_removed_fields(self):
- """Make RemoveField operations."""
- for app_label, model_name, field_name in sorted(self.old_field_keys - self.new_field_keys):
- self._generate_removed_field(app_label, model_name, field_name)
-
- def _generate_removed_field(self, app_label, model_name, field_name):
- self.add_operation(
- app_label,
- operations.RemoveField(
- model_name=model_name,
- name=field_name,
- ),
- # We might need to depend on the removal of an
- # order_with_respect_to or index/unique_together operation;
- # this is safely ignored if there isn't one
- dependencies=[
- (app_label, model_name, field_name, "order_wrt_unset"),
- (app_label, model_name, field_name, "foo_together_change"),
- ],
- )
-
- def generate_altered_fields(self):
- """
- Make AlterField operations, or possibly RemoveField/AddField if alter
- isn't possible.
- """
- for app_label, model_name, field_name in sorted(self.old_field_keys & self.new_field_keys):
- # Did the field change?
- old_model_name = self.renamed_models.get((app_label, model_name), model_name)
- old_field_name = self.renamed_fields.get((app_label, model_name, field_name), field_name)
- old_field = self.old_apps.get_model(app_label, old_model_name)._meta.get_field(old_field_name)
- new_field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name)
- # Implement any model renames on relations; these are handled by RenameModel
- # so we need to exclude them from the comparison
- if hasattr(new_field, "remote_field") and getattr(new_field.remote_field, "model", None):
- rename_key = (
- new_field.remote_field.model._meta.app_label,
- new_field.remote_field.model._meta.model_name,
- )
- if rename_key in self.renamed_models:
- new_field.remote_field.model = old_field.remote_field.model
- if hasattr(new_field, "remote_field") and getattr(new_field.remote_field, "through", None):
- rename_key = (
- new_field.remote_field.through._meta.app_label,
- new_field.remote_field.through._meta.model_name,
- )
- if rename_key in self.renamed_models:
- new_field.remote_field.through = old_field.remote_field.through
- old_field_dec = self.deep_deconstruct(old_field)
- new_field_dec = self.deep_deconstruct(new_field)
- if old_field_dec != new_field_dec:
- both_m2m = old_field.many_to_many and new_field.many_to_many
- neither_m2m = not old_field.many_to_many and not new_field.many_to_many
- if both_m2m or neither_m2m:
- # Either both fields are m2m or neither is
- preserve_default = True
- if (old_field.null and not new_field.null and not new_field.has_default() and
- not new_field.many_to_many):
- field = new_field.clone()
- new_default = self.questioner.ask_not_null_alteration(field_name, model_name)
- if new_default is not models.NOT_PROVIDED:
- field.default = new_default
- preserve_default = False
- else:
- field = new_field
- self.add_operation(
- app_label,
- operations.AlterField(
- model_name=model_name,
- name=field_name,
- field=field,
- preserve_default=preserve_default,
- )
- )
- else:
- # We cannot alter between m2m and concrete fields
- self._generate_removed_field(app_label, model_name, field_name)
- self._generate_added_field(app_label, model_name, field_name)
-
- def create_altered_indexes(self):
- option_name = operations.AddIndex.option_name
- for app_label, model_name in sorted(self.kept_model_keys):
- old_model_name = self.renamed_models.get((app_label, model_name), model_name)
- old_model_state = self.from_state.models[app_label, old_model_name]
- new_model_state = self.to_state.models[app_label, model_name]
-
- old_indexes = old_model_state.options[option_name]
- new_indexes = new_model_state.options[option_name]
- add_idx = [idx for idx in new_indexes if idx not in old_indexes]
- rem_idx = [idx for idx in old_indexes if idx not in new_indexes]
-
- self.altered_indexes.update({
- (app_label, model_name): {
- 'added_indexes': add_idx, 'removed_indexes': rem_idx,
- }
- })
-
- def generate_added_indexes(self):
- for (app_label, model_name), alt_indexes in self.altered_indexes.items():
- for index in alt_indexes['added_indexes']:
- self.add_operation(
- app_label,
- operations.AddIndex(
- model_name=model_name,
- index=index,
- )
- )
-
- def generate_removed_indexes(self):
- for (app_label, model_name), alt_indexes in self.altered_indexes.items():
- for index in alt_indexes['removed_indexes']:
- self.add_operation(
- app_label,
- operations.RemoveIndex(
- model_name=model_name,
- name=index.name,
- )
- )
-
- def _get_dependencies_for_foreign_key(self, field):
- # Account for FKs to swappable models
- swappable_setting = getattr(field, 'swappable_setting', None)
- if swappable_setting is not None:
- dep_app_label = "__setting__"
- dep_object_name = swappable_setting
- else:
- dep_app_label = field.remote_field.model._meta.app_label
- dep_object_name = field.remote_field.model._meta.object_name
- dependencies = [(dep_app_label, dep_object_name, None, True)]
- if getattr(field.remote_field, "through", None) and not field.remote_field.through._meta.auto_created:
- dependencies.append((
- field.remote_field.through._meta.app_label,
- field.remote_field.through._meta.object_name,
- None,
- True,
- ))
- return dependencies
-
- def _generate_altered_foo_together(self, operation):
- option_name = operation.option_name
- for app_label, model_name in sorted(self.kept_model_keys):
- old_model_name = self.renamed_models.get((app_label, model_name), model_name)
- old_model_state = self.from_state.models[app_label, old_model_name]
- new_model_state = self.to_state.models[app_label, model_name]
-
- # We run the old version through the field renames to account for those
- old_value = old_model_state.options.get(option_name)
- old_value = {
- tuple(
- self.renamed_fields.get((app_label, model_name, n), n)
- for n in unique
- )
- for unique in old_value
- } if old_value else set()
-
- new_value = new_model_state.options.get(option_name)
- new_value = set(new_value) if new_value else set()
-
- if old_value != new_value:
- dependencies = []
- for foo_togethers in new_value:
- for field_name in foo_togethers:
- field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name)
- if field.remote_field and field.remote_field.model:
- dependencies.extend(self._get_dependencies_for_foreign_key(field))
-
- self.add_operation(
- app_label,
- operation(
- name=model_name,
- **{option_name: new_value}
- ),
- dependencies=dependencies,
- )
-
- def generate_altered_unique_together(self):
- self._generate_altered_foo_together(operations.AlterUniqueTogether)
-
- def generate_altered_index_together(self):
- self._generate_altered_foo_together(operations.AlterIndexTogether)
-
- def generate_altered_db_table(self):
- models_to_check = self.kept_model_keys.union(self.kept_proxy_keys, self.kept_unmanaged_keys)
- for app_label, model_name in sorted(models_to_check):
- old_model_name = self.renamed_models.get((app_label, model_name), model_name)
- old_model_state = self.from_state.models[app_label, old_model_name]
- new_model_state = self.to_state.models[app_label, model_name]
- old_db_table_name = old_model_state.options.get('db_table')
- new_db_table_name = new_model_state.options.get('db_table')
- if old_db_table_name != new_db_table_name:
- self.add_operation(
- app_label,
- operations.AlterModelTable(
- name=model_name,
- table=new_db_table_name,
- )
- )
-
- def generate_altered_options(self):
- """
- Work out if any non-schema-affecting options have changed and make an
- operation to represent them in state changes (in case Python code in
- migrations needs them).
- """
- models_to_check = self.kept_model_keys.union(
- self.kept_proxy_keys,
- self.kept_unmanaged_keys,
- # unmanaged converted to managed
- self.old_unmanaged_keys & self.new_model_keys,
- # managed converted to unmanaged
- self.old_model_keys & self.new_unmanaged_keys,
- )
-
- for app_label, model_name in sorted(models_to_check):
- old_model_name = self.renamed_models.get((app_label, model_name), model_name)
- old_model_state = self.from_state.models[app_label, old_model_name]
- new_model_state = self.to_state.models[app_label, model_name]
- old_options = {
- key: value for key, value in old_model_state.options.items()
- if key in AlterModelOptions.ALTER_OPTION_KEYS
- }
- new_options = {
- key: value for key, value in new_model_state.options.items()
- if key in AlterModelOptions.ALTER_OPTION_KEYS
- }
- if old_options != new_options:
- self.add_operation(
- app_label,
- operations.AlterModelOptions(
- name=model_name,
- options=new_options,
- )
- )
-
- def generate_altered_order_with_respect_to(self):
- for app_label, model_name in sorted(self.kept_model_keys):
- old_model_name = self.renamed_models.get((app_label, model_name), model_name)
- old_model_state = self.from_state.models[app_label, old_model_name]
- new_model_state = self.to_state.models[app_label, model_name]
- if (old_model_state.options.get("order_with_respect_to") !=
- new_model_state.options.get("order_with_respect_to")):
- # Make sure it comes second if we're adding
- # (removal dependency is part of RemoveField)
- dependencies = []
- if new_model_state.options.get("order_with_respect_to"):
- dependencies.append((
- app_label,
- model_name,
- new_model_state.options["order_with_respect_to"],
- True,
- ))
- # Actually generate the operation
- self.add_operation(
- app_label,
- operations.AlterOrderWithRespectTo(
- name=model_name,
- order_with_respect_to=new_model_state.options.get('order_with_respect_to'),
- ),
- dependencies=dependencies,
- )
-
- def generate_altered_managers(self):
- for app_label, model_name in sorted(self.kept_model_keys):
- old_model_name = self.renamed_models.get((app_label, model_name), model_name)
- old_model_state = self.from_state.models[app_label, old_model_name]
- new_model_state = self.to_state.models[app_label, model_name]
- if old_model_state.managers != new_model_state.managers:
- self.add_operation(
- app_label,
- operations.AlterModelManagers(
- name=model_name,
- managers=new_model_state.managers,
- )
- )
-
- def arrange_for_graph(self, changes, graph, migration_name=None):
- """
- Take a result from changes() and a MigrationGraph, and fix the names
- and dependencies of the changes so they extend the graph from the leaf
- nodes for each app.
- """
- leaves = graph.leaf_nodes()
- name_map = {}
- for app_label, migrations in list(changes.items()):
- if not migrations:
- continue
- # Find the app label's current leaf node
- app_leaf = None
- for leaf in leaves:
- if leaf[0] == app_label:
- app_leaf = leaf
- break
- # Do they want an initial migration for this app?
- if app_leaf is None and not self.questioner.ask_initial(app_label):
- # They don't.
- for migration in migrations:
- name_map[(app_label, migration.name)] = (app_label, "__first__")
- del changes[app_label]
- continue
- # Work out the next number in the sequence
- if app_leaf is None:
- next_number = 1
- else:
- next_number = (self.parse_number(app_leaf[1]) or 0) + 1
- # Name each migration
- for i, migration in enumerate(migrations):
- if i == 0 and app_leaf:
- migration.dependencies.append(app_leaf)
- if i == 0 and not app_leaf:
- new_name = "0001_%s" % migration_name if migration_name else "0001_initial"
- else:
- new_name = "%04i_%s" % (
- next_number,
- migration_name or self.suggest_name(migration.operations)[:100],
- )
- name_map[(app_label, migration.name)] = (app_label, new_name)
- next_number += 1
- migration.name = new_name
- # Now fix dependencies
- for app_label, migrations in changes.items():
- for migration in migrations:
- migration.dependencies = [name_map.get(d, d) for d in migration.dependencies]
- return changes
-
- def _trim_to_apps(self, changes, app_labels):
- """
- Take changes from arrange_for_graph() and set of app labels, and return
- a modified set of changes which trims out as many migrations that are
- not in app_labels as possible. Note that some other migrations may
- still be present as they may be required dependencies.
- """
- # Gather other app dependencies in a first pass
- app_dependencies = {}
- for app_label, migrations in changes.items():
- for migration in migrations:
- for dep_app_label, name in migration.dependencies:
- app_dependencies.setdefault(app_label, set()).add(dep_app_label)
- required_apps = set(app_labels)
- # Keep resolving till there's no change
- old_required_apps = None
- while old_required_apps != required_apps:
- old_required_apps = set(required_apps)
- required_apps.update(*[app_dependencies.get(app_label, ()) for app_label in required_apps])
- # Remove all migrations that aren't needed
- for app_label in list(changes):
- if app_label not in required_apps:
- del changes[app_label]
- return changes
-
- @classmethod
- def suggest_name(cls, ops):
- """
- Given a set of operations, suggest a name for the migration they might
- represent. Names are not guaranteed to be unique, but put some effort
- into the fallback name to avoid VCS conflicts if possible.
- """
- if len(ops) == 1:
- if isinstance(ops[0], operations.CreateModel):
- return ops[0].name_lower
- elif isinstance(ops[0], operations.DeleteModel):
- return "delete_%s" % ops[0].name_lower
- elif isinstance(ops[0], operations.AddField):
- return "%s_%s" % (ops[0].model_name_lower, ops[0].name_lower)
- elif isinstance(ops[0], operations.RemoveField):
- return "remove_%s_%s" % (ops[0].model_name_lower, ops[0].name_lower)
- elif len(ops) > 1:
- if all(isinstance(o, operations.CreateModel) for o in ops):
- return "_".join(sorted(o.name_lower for o in ops))
- return "auto_%s" % get_migration_name_timestamp()
-
- @classmethod
- def parse_number(cls, name):
- """
- Given a migration name, try to extract a number from the beginning of
- it. If no number is found, return None.
- """
- match = re.match(r'^\d+', name)
- if match:
- return int(match.group())
- return None
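The naming helpers that close out this class can be exercised on their own. A minimal sketch, assuming the surrounding class is Django's MigrationAutodetector (its header lies earlier in this file) and using a made-up operation list:

    from django.db.migrations.autodetector import MigrationAutodetector
    from django.db.migrations.operations import DeleteModel

    # parse_number() extracts the leading counter that arrange_for_graph() increments.
    assert MigrationAutodetector.parse_number("0003_remove_post_author") == 3

    # suggest_name() builds the descriptive suffix for single-operation migrations;
    # mixed operation lists fall back to "auto_<timestamp>".
    print(MigrationAutodetector.suggest_name([DeleteModel(name="Post")]))  # delete_post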
diff --git a/thesisenv/lib/python3.6/site-packages/django/db/migrations/exceptions.py b/thesisenv/lib/python3.6/site-packages/django/db/migrations/exceptions.py
deleted file mode 100644
index c2e9ceb..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/db/migrations/exceptions.py
+++ /dev/null
@@ -1,54 +0,0 @@
-from django.db.utils import DatabaseError
-
-
-class AmbiguityError(Exception):
- """More than one migration matches a name prefix."""
- pass
-
-
-class BadMigrationError(Exception):
- """There's a bad migration (unreadable/bad format/etc.)."""
- pass
-
-
-class CircularDependencyError(Exception):
- """There's an impossible-to-resolve circular dependency."""
- pass
-
-
-class InconsistentMigrationHistory(Exception):
- """An applied migration has some of its dependencies not applied."""
- pass
-
-
-class InvalidBasesError(ValueError):
- """A model's base classes can't be resolved."""
- pass
-
-
-class IrreversibleError(RuntimeError):
- """An irreversible migration is about to be reversed."""
- pass
-
-
-class NodeNotFoundError(LookupError):
- """An attempt on a node is made that is not available in the graph."""
-
- def __init__(self, message, node, origin=None):
- self.message = message
- self.origin = origin
- self.node = node
-
- def __str__(self):
- return self.message
-
- def __repr__(self):
- return "NodeNotFoundError(%r)" % (self.node, )
-
-
-class MigrationSchemaMissing(DatabaseError):
- pass
-
-
-class InvalidMigrationPlan(ValueError):
- pass
diff --git a/thesisenv/lib/python3.6/site-packages/django/db/migrations/executor.py b/thesisenv/lib/python3.6/site-packages/django/db/migrations/executor.py
deleted file mode 100644
index ea7bc70..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/db/migrations/executor.py
+++ /dev/null
@@ -1,368 +0,0 @@
-from django.apps.registry import apps as global_apps
-from django.db import migrations, router
-
-from .exceptions import InvalidMigrationPlan
-from .loader import MigrationLoader
-from .recorder import MigrationRecorder
-from .state import ProjectState
-
-
-class MigrationExecutor:
- """
- End-to-end migration execution - load migrations and run them up or down
- to a specified set of targets.
- """
-
- def __init__(self, connection, progress_callback=None):
- self.connection = connection
- self.loader = MigrationLoader(self.connection)
- self.recorder = MigrationRecorder(self.connection)
- self.progress_callback = progress_callback
-
- def migration_plan(self, targets, clean_start=False):
- """
- Given a set of targets, return a list of (Migration instance, backwards?).
- """
- plan = []
- if clean_start:
- applied = set()
- else:
- applied = set(self.loader.applied_migrations)
- for target in targets:
- # If the target is (app_label, None), that means unmigrate everything
- if target[1] is None:
- for root in self.loader.graph.root_nodes():
- if root[0] == target[0]:
- for migration in self.loader.graph.backwards_plan(root):
- if migration in applied:
- plan.append((self.loader.graph.nodes[migration], True))
- applied.remove(migration)
- # If the migration is already applied, do backwards mode,
- # otherwise do forwards mode.
- elif target in applied:
- # Don't migrate backwards all the way to the target node (that
- # may roll back dependencies in other apps that don't need to
- # be rolled back); instead roll back through target's immediate
- # child(ren) in the same app, and no further.
- next_in_app = sorted(
- n for n in
- self.loader.graph.node_map[target].children
- if n[0] == target[0]
- )
- for node in next_in_app:
- for migration in self.loader.graph.backwards_plan(node):
- if migration in applied:
- plan.append((self.loader.graph.nodes[migration], True))
- applied.remove(migration)
- else:
- for migration in self.loader.graph.forwards_plan(target):
- if migration not in applied:
- plan.append((self.loader.graph.nodes[migration], False))
- applied.add(migration)
- return plan
-
- def _create_project_state(self, with_applied_migrations=False):
- """
- Create a project state including all the applications without
- migrations and applied migrations if with_applied_migrations=True.
- """
- state = ProjectState(real_apps=list(self.loader.unmigrated_apps))
- if with_applied_migrations:
- # Create the forwards plan Django would follow on an empty database
- full_plan = self.migration_plan(self.loader.graph.leaf_nodes(), clean_start=True)
- applied_migrations = {
- self.loader.graph.nodes[key] for key in self.loader.applied_migrations
- if key in self.loader.graph.nodes
- }
- for migration, _ in full_plan:
- if migration in applied_migrations:
- migration.mutate_state(state, preserve=False)
- return state
-
- def migrate(self, targets, plan=None, state=None, fake=False, fake_initial=False):
- """
- Migrate the database up to the given targets.
-
- Django first needs to create all project states before a migration is
- (un)applied and in a second step run all the database operations.
- """
- # The django_migrations table must be present to record applied
- # migrations.
- self.recorder.ensure_schema()
-
- if plan is None:
- plan = self.migration_plan(targets)
- # Create the forwards plan Django would follow on an empty database
- full_plan = self.migration_plan(self.loader.graph.leaf_nodes(), clean_start=True)
-
- all_forwards = all(not backwards for mig, backwards in plan)
- all_backwards = all(backwards for mig, backwards in plan)
-
- if not plan:
- if state is None:
- # The resulting state should include applied migrations.
- state = self._create_project_state(with_applied_migrations=True)
- elif all_forwards == all_backwards:
- # This should only happen if there's a mixed plan
- raise InvalidMigrationPlan(
- "Migration plans with both forwards and backwards migrations "
- "are not supported. Please split your migration process into "
- "separate plans of only forwards OR backwards migrations.",
- plan
- )
- elif all_forwards:
- if state is None:
- # The resulting state should still include applied migrations.
- state = self._create_project_state(with_applied_migrations=True)
- state = self._migrate_all_forwards(state, plan, full_plan, fake=fake, fake_initial=fake_initial)
- else:
- # No need to check for `elif all_backwards` here, as that condition
- # would always evaluate to true.
- state = self._migrate_all_backwards(plan, full_plan, fake=fake)
-
- self.check_replacements()
-
- return state
-
- def _migrate_all_forwards(self, state, plan, full_plan, fake, fake_initial):
- """
- Take a list of 2-tuples of the form (migration instance, False) and
- apply them in the order they occur in the full_plan.
- """
- migrations_to_run = {m[0] for m in plan}
- for migration, _ in full_plan:
- if not migrations_to_run:
- # We remove every migration that we applied from these sets so
- # that we can bail out once the last migration has been applied
- # and don't always run until the very end of the migration
- # process.
- break
- if migration in migrations_to_run:
- if 'apps' not in state.__dict__:
- if self.progress_callback:
- self.progress_callback("render_start")
- state.apps # Render all -- performance critical
- if self.progress_callback:
- self.progress_callback("render_success")
- state = self.apply_migration(state, migration, fake=fake, fake_initial=fake_initial)
- migrations_to_run.remove(migration)
-
- return state
-
- def _migrate_all_backwards(self, plan, full_plan, fake):
- """
- Take a list of 2-tuples of the form (migration instance, True) and
- unapply them in reverse order they occur in the full_plan.
-
- Since unapplying a migration requires the project state prior to that
- migration, Django will compute the migration states before each of them
- in a first run over the plan and then unapply them in a second run over
- the plan.
- """
- migrations_to_run = {m[0] for m in plan}
- # Holds all migration states prior to the migrations being unapplied
- states = {}
- state = self._create_project_state()
- applied_migrations = {
- self.loader.graph.nodes[key] for key in self.loader.applied_migrations
- if key in self.loader.graph.nodes
- }
- if self.progress_callback:
- self.progress_callback("render_start")
- for migration, _ in full_plan:
- if not migrations_to_run:
- # We remove every migration that we applied from this set so
- # that we can bail out once the last migration has been applied
- # and don't always run until the very end of the migration
- # process.
- break
- if migration in migrations_to_run:
- if 'apps' not in state.__dict__:
- state.apps # Render all -- performance critical
- # The state before this migration
- states[migration] = state
- # The old state keeps as-is, we continue with the new state
- state = migration.mutate_state(state, preserve=True)
- migrations_to_run.remove(migration)
- elif migration in applied_migrations:
- # Only mutate the state if the migration is actually applied
- # to make sure the resulting state doesn't include changes
- # from unrelated migrations.
- migration.mutate_state(state, preserve=False)
- if self.progress_callback:
- self.progress_callback("render_success")
-
- for migration, _ in plan:
- self.unapply_migration(states[migration], migration, fake=fake)
- applied_migrations.remove(migration)
-
- # Generate the post migration state by starting from the state before
- # the last migration is unapplied and mutating it to include all the
- # remaining applied migrations.
- last_unapplied_migration = plan[-1][0]
- state = states[last_unapplied_migration]
- for index, (migration, _) in enumerate(full_plan):
- if migration == last_unapplied_migration:
- for migration, _ in full_plan[index:]:
- if migration in applied_migrations:
- migration.mutate_state(state, preserve=False)
- break
-
- return state
-
- def collect_sql(self, plan):
- """
- Take a migration plan and return a list of collected SQL statements
- that represent the best-efforts version of that plan.
- """
- statements = []
- state = None
- for migration, backwards in plan:
- with self.connection.schema_editor(collect_sql=True, atomic=migration.atomic) as schema_editor:
- if state is None:
- state = self.loader.project_state((migration.app_label, migration.name), at_end=False)
- if not backwards:
- state = migration.apply(state, schema_editor, collect_sql=True)
- else:
- state = migration.unapply(state, schema_editor, collect_sql=True)
- statements.extend(schema_editor.collected_sql)
- return statements
-
- def apply_migration(self, state, migration, fake=False, fake_initial=False):
- """Run a migration forwards."""
- if self.progress_callback:
- self.progress_callback("apply_start", migration, fake)
- if not fake:
- if fake_initial:
- # Test to see if this is an already-applied initial migration
- applied, state = self.detect_soft_applied(state, migration)
- if applied:
- fake = True
- if not fake:
- # Alright, do it normally
- with self.connection.schema_editor(atomic=migration.atomic) as schema_editor:
- state = migration.apply(state, schema_editor)
- # For replacement migrations, record individual statuses
- if migration.replaces:
- for app_label, name in migration.replaces:
- self.recorder.record_applied(app_label, name)
- else:
- self.recorder.record_applied(migration.app_label, migration.name)
- # Report progress
- if self.progress_callback:
- self.progress_callback("apply_success", migration, fake)
- return state
-
- def unapply_migration(self, state, migration, fake=False):
- """Run a migration backwards."""
- if self.progress_callback:
- self.progress_callback("unapply_start", migration, fake)
- if not fake:
- with self.connection.schema_editor(atomic=migration.atomic) as schema_editor:
- state = migration.unapply(state, schema_editor)
- # For replacement migrations, record individual statuses
- if migration.replaces:
- for app_label, name in migration.replaces:
- self.recorder.record_unapplied(app_label, name)
- else:
- self.recorder.record_unapplied(migration.app_label, migration.name)
- # Report progress
- if self.progress_callback:
- self.progress_callback("unapply_success", migration, fake)
- return state
-
- def check_replacements(self):
- """
- Mark replacement migrations applied if their replaced set all are.
-
- Do this unconditionally on every migrate, rather than just when
- migrations are applied or unapplied, to correctly handle the case
- when a new squash migration is pushed to a deployment that already had
- all its replaced migrations applied. In this case no new migration will
- be applied, but the applied state of the squashed migration must be
- maintained.
- """
- applied = self.recorder.applied_migrations()
- for key, migration in self.loader.replacements.items():
- all_applied = all(m in applied for m in migration.replaces)
- if all_applied and key not in applied:
- self.recorder.record_applied(*key)
-
- def detect_soft_applied(self, project_state, migration):
- """
- Test whether a migration has been implicitly applied - that the
- tables or columns it would create exist. This is intended only for use
- on initial migrations (as it only looks for CreateModel and AddField).
- """
- def should_skip_detecting_model(migration, model):
- """
- No need to detect tables for proxy models, unmanaged models, or
- models that can't be migrated on the current database.
- """
- return (
- model._meta.proxy or not model._meta.managed or not
- router.allow_migrate(
- self.connection.alias, migration.app_label,
- model_name=model._meta.model_name,
- )
- )
-
- if migration.initial is None:
- # Bail if the migration isn't the first one in its app
- if any(app == migration.app_label for app, name in migration.dependencies):
- return False, project_state
- elif migration.initial is False:
- # Bail if it's NOT an initial migration
- return False, project_state
-
- if project_state is None:
- after_state = self.loader.project_state((migration.app_label, migration.name), at_end=True)
- else:
- after_state = migration.mutate_state(project_state)
- apps = after_state.apps
- found_create_model_migration = False
- found_add_field_migration = False
- existing_table_names = self.connection.introspection.table_names(self.connection.cursor())
- # Make sure all create model and add field operations are done
- for operation in migration.operations:
- if isinstance(operation, migrations.CreateModel):
- model = apps.get_model(migration.app_label, operation.name)
- if model._meta.swapped:
- # We have to fetch the model to test with from the
- # main app cache, as it's not a direct dependency.
- model = global_apps.get_model(model._meta.swapped)
- if should_skip_detecting_model(migration, model):
- continue
- if model._meta.db_table not in existing_table_names:
- return False, project_state
- found_create_model_migration = True
- elif isinstance(operation, migrations.AddField):
- model = apps.get_model(migration.app_label, operation.model_name)
- if model._meta.swapped:
- # We have to fetch the model to test with from the
- # main app cache, as it's not a direct dependency.
- model = global_apps.get_model(model._meta.swapped)
- if should_skip_detecting_model(migration, model):
- continue
-
- table = model._meta.db_table
- field = model._meta.get_field(operation.name)
-
- # Handle implicit many-to-many tables created by AddField.
- if field.many_to_many:
- if field.remote_field.through._meta.db_table not in existing_table_names:
- return False, project_state
- else:
- found_add_field_migration = True
- continue
-
- column_names = [
- column.name for column in
- self.connection.introspection.get_table_description(self.connection.cursor(), table)
- ]
- if field.column not in column_names:
- return False, project_state
- found_add_field_migration = True
- # If we get this far and we found at least one CreateModel or AddField migration,
- # the migration is considered implicitly applied.
- return (found_create_model_migration or found_add_field_migration), after_state
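A minimal sketch of how the executor API above is typically driven (the same leaf-node targeting used by manage.py migrate); the default database `connection` alias is assumed:

    from django.db import connection
    from django.db.migrations.executor import MigrationExecutor

    executor = MigrationExecutor(connection)
    # Target the newest migration of every app and compute the forwards plan.
    targets = executor.loader.graph.leaf_nodes()
    plan = executor.migration_plan(targets)   # list of (Migration, backwards?) tuples
    if plan:
        executor.migrate(targets, plan=plan)  # applies and records each migration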
diff --git a/thesisenv/lib/python3.6/site-packages/django/db/migrations/graph.py b/thesisenv/lib/python3.6/site-packages/django/db/migrations/graph.py
deleted file mode 100644
index 687a9b3..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/db/migrations/graph.py
+++ /dev/null
@@ -1,380 +0,0 @@
-import warnings
-from functools import total_ordering
-
-from django.db.migrations.state import ProjectState
-from django.utils.datastructures import OrderedSet
-
-from .exceptions import CircularDependencyError, NodeNotFoundError
-
-RECURSION_DEPTH_WARNING = (
- "Maximum recursion depth exceeded while generating migration graph, "
- "falling back to iterative approach. If you're experiencing performance issues, "
- "consider squashing migrations as described at "
- "https://docs.djangoproject.com/en/dev/topics/migrations/#squashing-migrations."
-)
-
-
-@total_ordering
-class Node:
- """
- A single node in the migration graph. Contains direct links to adjacent
- nodes in either direction.
- """
- def __init__(self, key):
- self.key = key
- self.children = set()
- self.parents = set()
-
- def __eq__(self, other):
- return self.key == other
-
- def __lt__(self, other):
- return self.key < other
-
- def __hash__(self):
- return hash(self.key)
-
- def __getitem__(self, item):
- return self.key[item]
-
- def __str__(self):
- return str(self.key)
-
- def __repr__(self):
- return '<%s: (%r, %r)>' % (self.__class__.__name__, self.key[0], self.key[1])
-
- def add_child(self, child):
- self.children.add(child)
-
- def add_parent(self, parent):
- self.parents.add(parent)
-
- # Use manual caching, @cached_property effectively doubles the
- # recursion depth for each recursion.
- def ancestors(self):
- # Use self.key instead of self to speed up the frequent hashing
- # when constructing an OrderedSet.
- if '_ancestors' not in self.__dict__:
- ancestors = []
- for parent in sorted(self.parents, reverse=True):
- ancestors += parent.ancestors()
- ancestors.append(self.key)
- self.__dict__['_ancestors'] = list(OrderedSet(ancestors))
- return self.__dict__['_ancestors']
-
- # Use manual caching, @cached_property effectively doubles the
- # recursion depth for each recursion.
- def descendants(self):
- # Use self.key instead of self to speed up the frequent hashing
- # when constructing an OrderedSet.
- if '_descendants' not in self.__dict__:
- descendants = []
- for child in sorted(self.children, reverse=True):
- descendants += child.descendants()
- descendants.append(self.key)
- self.__dict__['_descendants'] = list(OrderedSet(descendants))
- return self.__dict__['_descendants']
-
-
-class DummyNode(Node):
- def __init__(self, key, origin, error_message):
- super().__init__(key)
- self.origin = origin
- self.error_message = error_message
-
- def promote(self):
- """
- Transition dummy to a normal node and clean off excess attribs.
- Creating a Node object from scratch would be too much of a
- hassle as many dependencies would need to be remapped.
- """
- del self.origin
- del self.error_message
- self.__class__ = Node
-
- def raise_error(self):
- raise NodeNotFoundError(self.error_message, self.key, origin=self.origin)
-
-
-class MigrationGraph:
- """
- Represent the digraph of all migrations in a project.
-
- Each migration is a node, and each dependency is an edge. There are
- no implicit dependencies between numbered migrations - the numbering is
- merely a convention to aid file listing. Every new numbered migration
- has a declared dependency to the previous number, meaning that VCS
- branch merges can be detected and resolved.
-
- Migrations files can be marked as replacing another set of migrations -
- this is to support the "squash" feature. The graph handler isn't responsible
- for these; instead, the code to load them in here should examine the
- migration files and if the replaced migrations are all either unapplied
- or not present, it should ignore the replaced ones, load in just the
- replacing migration, and repoint any dependencies that pointed to the
- replaced migrations to point to the replacing one.
-
- A node should be a tuple: (app_path, migration_name). The tree special-cases
- things within an app - namely, root nodes and leaf nodes ignore dependencies
- to other apps.
- """
-
- def __init__(self):
- self.node_map = {}
- self.nodes = {}
- self.cached = False
-
- def add_node(self, key, migration):
- # If the key already exists, then it must be a dummy node.
- dummy_node = self.node_map.get(key)
- if dummy_node:
- # Promote DummyNode to Node.
- dummy_node.promote()
- else:
- node = Node(key)
- self.node_map[key] = node
- self.nodes[key] = migration
- self.clear_cache()
-
- def add_dummy_node(self, key, origin, error_message):
- node = DummyNode(key, origin, error_message)
- self.node_map[key] = node
- self.nodes[key] = None
-
- def add_dependency(self, migration, child, parent, skip_validation=False):
- """
- This may create dummy nodes if they don't yet exist. If
- `skip_validation=True`, validate_consistency() should be called
- afterwards.
- """
- if child not in self.nodes:
- error_message = (
- "Migration %s dependencies reference nonexistent"
- " child node %r" % (migration, child)
- )
- self.add_dummy_node(child, migration, error_message)
- if parent not in self.nodes:
- error_message = (
- "Migration %s dependencies reference nonexistent"
- " parent node %r" % (migration, parent)
- )
- self.add_dummy_node(parent, migration, error_message)
- self.node_map[child].add_parent(self.node_map[parent])
- self.node_map[parent].add_child(self.node_map[child])
- if not skip_validation:
- self.validate_consistency()
- self.clear_cache()
-
- def remove_replaced_nodes(self, replacement, replaced):
- """
- Remove each of the `replaced` nodes (when they exist). Any
- dependencies that were referencing them are changed to reference the
- `replacement` node instead.
- """
- # Cast list of replaced keys to set to speed up lookup later.
- replaced = set(replaced)
- try:
- replacement_node = self.node_map[replacement]
- except KeyError as err:
- raise NodeNotFoundError(
- "Unable to find replacement node %r. It was either never added"
- " to the migration graph, or has been removed." % (replacement, ),
- replacement
- ) from err
- for replaced_key in replaced:
- self.nodes.pop(replaced_key, None)
- replaced_node = self.node_map.pop(replaced_key, None)
- if replaced_node:
- for child in replaced_node.children:
- child.parents.remove(replaced_node)
- # We don't want to create dependencies between the replaced
- # node and the replacement node as this would lead to
- # self-referencing on the replacement node at a later iteration.
- if child.key not in replaced:
- replacement_node.add_child(child)
- child.add_parent(replacement_node)
- for parent in replaced_node.parents:
- parent.children.remove(replaced_node)
- # Again, to avoid self-referencing.
- if parent.key not in replaced:
- replacement_node.add_parent(parent)
- parent.add_child(replacement_node)
- self.clear_cache()
-
- def remove_replacement_node(self, replacement, replaced):
- """
- The inverse operation to `remove_replaced_nodes`. Almost. Remove the
- replacement node `replacement` and remap its child nodes to `replaced`
- - the list of nodes it would have replaced. Don't remap its parent
- nodes as they are expected to be correct already.
- """
- self.nodes.pop(replacement, None)
- try:
- replacement_node = self.node_map.pop(replacement)
- except KeyError as err:
- raise NodeNotFoundError(
- "Unable to remove replacement node %r. It was either never added"
- " to the migration graph, or has been removed already." % (replacement, ),
- replacement
- ) from err
- replaced_nodes = set()
- replaced_nodes_parents = set()
- for key in replaced:
- replaced_node = self.node_map.get(key)
- if replaced_node:
- replaced_nodes.add(replaced_node)
- replaced_nodes_parents |= replaced_node.parents
- # We're only interested in the latest replaced node, so filter out
- # replaced nodes that are parents of other replaced nodes.
- replaced_nodes -= replaced_nodes_parents
- for child in replacement_node.children:
- child.parents.remove(replacement_node)
- for replaced_node in replaced_nodes:
- replaced_node.add_child(child)
- child.add_parent(replaced_node)
- for parent in replacement_node.parents:
- parent.children.remove(replacement_node)
- # NOTE: There is no need to remap parent dependencies as we can
- # assume the replaced nodes already have the correct ancestry.
- self.clear_cache()
-
- def validate_consistency(self):
- """Ensure there are no dummy nodes remaining in the graph."""
- [n.raise_error() for n in self.node_map.values() if isinstance(n, DummyNode)]
-
- def clear_cache(self):
- if self.cached:
- for node in self.nodes:
- self.node_map[node].__dict__.pop('_ancestors', None)
- self.node_map[node].__dict__.pop('_descendants', None)
- self.cached = False
-
- def forwards_plan(self, target):
- """
- Given a node, return a list of which previous nodes (dependencies) must
- be applied, ending with the node itself. This is the list you would
- follow if applying the migrations to a database.
- """
- if target not in self.nodes:
- raise NodeNotFoundError("Node %r not a valid node" % (target, ), target)
- # Use parent.key instead of parent to speed up the frequent hashing in ensure_not_cyclic
- self.ensure_not_cyclic(target, lambda x: (parent.key for parent in self.node_map[x].parents))
- self.cached = True
- node = self.node_map[target]
- try:
- return node.ancestors()
- except RuntimeError:
- # fallback to iterative dfs
- warnings.warn(RECURSION_DEPTH_WARNING, RuntimeWarning)
- return self.iterative_dfs(node)
-
- def backwards_plan(self, target):
- """
- Given a node, return a list of which dependent nodes (dependencies)
- must be unapplied, ending with the node itself. This is the list you
- would follow if removing the migrations from a database.
- """
- if target not in self.nodes:
- raise NodeNotFoundError("Node %r not a valid node" % (target, ), target)
- # Use child.key instead of child to speed up the frequent hashing in ensure_not_cyclic
- self.ensure_not_cyclic(target, lambda x: (child.key for child in self.node_map[x].children))
- self.cached = True
- node = self.node_map[target]
- try:
- return node.descendants()
- except RuntimeError:
- # fallback to iterative dfs
- warnings.warn(RECURSION_DEPTH_WARNING, RuntimeWarning)
- return self.iterative_dfs(node, forwards=False)
-
- def iterative_dfs(self, start, forwards=True):
- """Iterative depth-first search for finding dependencies."""
- visited = []
- stack = [start]
- while stack:
- node = stack.pop()
- visited.append(node)
- stack += sorted(node.parents if forwards else node.children)
- return list(OrderedSet(reversed(visited)))
-
- def root_nodes(self, app=None):
- """
- Return all root nodes - that is, nodes with no dependencies inside
- their app. These are the starting point for an app.
- """
- roots = set()
- for node in self.nodes:
- if not any(key[0] == node[0] for key in self.node_map[node].parents) and (not app or app == node[0]):
- roots.add(node)
- return sorted(roots)
-
- def leaf_nodes(self, app=None):
- """
- Return all leaf nodes - that is, nodes with no dependents in their app.
- These are the "most current" version of an app's schema.
- Having more than one per app is technically an error, but one that
- gets handled further up, in the interactive command - it's usually the
- result of a VCS merge and needs some user input.
- """
- leaves = set()
- for node in self.nodes:
- if not any(key[0] == node[0] for key in self.node_map[node].children) and (not app or app == node[0]):
- leaves.add(node)
- return sorted(leaves)
-
- def ensure_not_cyclic(self, start, get_children):
- # Algo from GvR:
- # http://neopythonic.blogspot.co.uk/2009/01/detecting-cycles-in-directed-graph.html
- todo = set(self.nodes)
- while todo:
- node = todo.pop()
- stack = [node]
- while stack:
- top = stack[-1]
- for node in get_children(top):
- if node in stack:
- cycle = stack[stack.index(node):]
- raise CircularDependencyError(", ".join("%s.%s" % n for n in cycle))
- if node in todo:
- stack.append(node)
- todo.remove(node)
- break
- else:
- node = stack.pop()
-
- def __str__(self):
- return 'Graph: %s nodes, %s edges' % self._nodes_and_edges()
-
- def __repr__(self):
- nodes, edges = self._nodes_and_edges()
- return '<%s: nodes=%s, edges=%s>' % (self.__class__.__name__, nodes, edges)
-
- def _nodes_and_edges(self):
- return len(self.nodes), sum(len(node.parents) for node in self.node_map.values())
-
- def make_state(self, nodes=None, at_end=True, real_apps=None):
- """
- Given a migration node or nodes, return a complete ProjectState for it.
- If at_end is False, return the state before the migration has run.
- If nodes is not provided, return the overall most current project state.
- """
- if nodes is None:
- nodes = list(self.leaf_nodes())
- if len(nodes) == 0:
- return ProjectState()
- if not isinstance(nodes[0], tuple):
- nodes = [nodes]
- plan = []
- for node in nodes:
- for migration in self.forwards_plan(node):
- if migration not in plan:
- if not at_end and migration in nodes:
- continue
- plan.append(migration)
- project_state = ProjectState(real_apps=real_apps)
- for node in plan:
- project_state = self.nodes[node].mutate_state(project_state, preserve=False)
- return project_state
-
- def __contains__(self, node):
- return node in self.nodes
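The graph class above can be exercised standalone. A minimal sketch with hypothetical node keys, showing that forwards_plan() returns dependencies first and the requested node last:

    from django.db.migrations.graph import MigrationGraph

    graph = MigrationGraph()
    graph.add_node(("blog", "0001_initial"), None)
    graph.add_node(("blog", "0002_add_field"), None)
    # add_dependency(migration, child, parent): 0002 depends on 0001.
    graph.add_dependency("blog.0002_add_field",
                         ("blog", "0002_add_field"), ("blog", "0001_initial"))
    print(graph.forwards_plan(("blog", "0002_add_field")))
    # [('blog', '0001_initial'), ('blog', '0002_add_field')]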
diff --git a/thesisenv/lib/python3.6/site-packages/django/db/migrations/loader.py b/thesisenv/lib/python3.6/site-packages/django/db/migrations/loader.py
deleted file mode 100644
index 8a1cf43..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/db/migrations/loader.py
+++ /dev/null
@@ -1,316 +0,0 @@
-import os
-import sys
-from importlib import import_module, reload
-
-from django.apps import apps
-from django.conf import settings
-from django.db.migrations.graph import MigrationGraph
-from django.db.migrations.recorder import MigrationRecorder
-
-from .exceptions import (
- AmbiguityError, BadMigrationError, InconsistentMigrationHistory,
- NodeNotFoundError,
-)
-
-MIGRATIONS_MODULE_NAME = 'migrations'
-
-
-class MigrationLoader:
- """
- Load migration files from disk and their status from the database.
-
- Migration files are expected to live in the "migrations" directory of
- an app. Their names are entirely unimportant from a code perspective,
- but will probably follow the 1234_name.py convention.
-
- On initialization, this class will scan those directories, and open and
- read the python files, looking for a class called Migration, which should
- inherit from django.db.migrations.Migration. See
- django.db.migrations.migration for what that looks like.
-
- Some migrations will be marked as "replacing" another set of migrations.
- These are loaded into a separate set of migrations away from the main ones.
- If all the migrations they replace are either unapplied or missing from
- disk, then they are injected into the main set, replacing the named migrations.
- Any dependency pointers to the replaced migrations are re-pointed to the
- new migration.
-
- This does mean that this class MUST also talk to the database as well as
- to disk, but this is probably fine. We're already not just operating
- in memory.
- """
-
- def __init__(self, connection, load=True, ignore_no_migrations=False):
- self.connection = connection
- self.disk_migrations = None
- self.applied_migrations = None
- self.ignore_no_migrations = ignore_no_migrations
- if load:
- self.build_graph()
-
- @classmethod
- def migrations_module(cls, app_label):
- """
- Return the path to the migrations module for the specified app_label
- and a boolean indicating if the module is specified in
- settings.MIGRATION_MODULES.
- """
- if app_label in settings.MIGRATION_MODULES:
- return settings.MIGRATION_MODULES[app_label], True
- else:
- app_package_name = apps.get_app_config(app_label).name
- return '%s.%s' % (app_package_name, MIGRATIONS_MODULE_NAME), False
-
- def load_disk(self):
- """Load the migrations from all INSTALLED_APPS from disk."""
- self.disk_migrations = {}
- self.unmigrated_apps = set()
- self.migrated_apps = set()
- for app_config in apps.get_app_configs():
- # Get the migrations module directory
- module_name, explicit = self.migrations_module(app_config.label)
- if module_name is None:
- self.unmigrated_apps.add(app_config.label)
- continue
- was_loaded = module_name in sys.modules
- try:
- module = import_module(module_name)
- except ImportError as e:
- # I hate doing this, but I don't want to squash other import errors.
- # Might be better to try a directory check directly.
- if ((explicit and self.ignore_no_migrations) or (
- not explicit and "No module named" in str(e) and MIGRATIONS_MODULE_NAME in str(e))):
- self.unmigrated_apps.add(app_config.label)
- continue
- raise
- else:
- # PY3 will happily import empty dirs as namespaces.
- if not hasattr(module, '__file__'):
- self.unmigrated_apps.add(app_config.label)
- continue
- # Module is not a package (e.g. migrations.py).
- if not hasattr(module, '__path__'):
- self.unmigrated_apps.add(app_config.label)
- continue
- # Force a reload if it's already loaded (tests need this)
- if was_loaded:
- reload(module)
- self.migrated_apps.add(app_config.label)
- directory = os.path.dirname(module.__file__)
- # Scan for .py files
- migration_names = set()
- for name in os.listdir(directory):
- if name.endswith(".py"):
- import_name = name.rsplit(".", 1)[0]
- if import_name[0] not in "_.~":
- migration_names.add(import_name)
- # Load them
- for migration_name in migration_names:
- migration_module = import_module("%s.%s" % (module_name, migration_name))
- if not hasattr(migration_module, "Migration"):
- raise BadMigrationError(
- "Migration %s in app %s has no Migration class" % (migration_name, app_config.label)
- )
- self.disk_migrations[app_config.label, migration_name] = migration_module.Migration(
- migration_name,
- app_config.label,
- )
-
- def get_migration(self, app_label, name_prefix):
- """Return the named migration or raise NodeNotFoundError."""
- return self.graph.nodes[app_label, name_prefix]
-
- def get_migration_by_prefix(self, app_label, name_prefix):
- """
- Return the migration(s) which match the given app label and name_prefix.
- """
- # Do the search
- results = []
- for migration_app_label, migration_name in self.disk_migrations:
- if migration_app_label == app_label and migration_name.startswith(name_prefix):
- results.append((migration_app_label, migration_name))
- if len(results) > 1:
- raise AmbiguityError(
- "There is more than one migration for '%s' with the prefix '%s'" % (app_label, name_prefix)
- )
- elif len(results) == 0:
- raise KeyError("There no migrations for '%s' with the prefix '%s'" % (app_label, name_prefix))
- else:
- return self.disk_migrations[results[0]]
-
- def check_key(self, key, current_app):
- if (key[1] != "__first__" and key[1] != "__latest__") or key in self.graph:
- return key
- # Special-case __first__, which means "the first migration" for
- # migrated apps, and is ignored for unmigrated apps. It allows
- # makemigrations to declare dependencies on apps before they even have
- # migrations.
- if key[0] == current_app:
- # Ignore __first__ references to the same app (#22325)
- return
- if key[0] in self.unmigrated_apps:
- # This app isn't migrated, but something depends on it.
- # The models will get auto-added into the state, though
- # so we're fine.
- return
- if key[0] in self.migrated_apps:
- try:
- if key[1] == "__first__":
- return self.graph.root_nodes(key[0])[0]
- else: # "__latest__"
- return self.graph.leaf_nodes(key[0])[0]
- except IndexError:
- if self.ignore_no_migrations:
- return None
- else:
- raise ValueError("Dependency on app with no migrations: %s" % key[0])
- raise ValueError("Dependency on unknown app: %s" % key[0])
-
- def add_internal_dependencies(self, key, migration):
- """
- Internal dependencies need to be added first to ensure `__first__`
- dependencies find the correct root node.
- """
- for parent in migration.dependencies:
- if parent[0] != key[0] or parent[1] == '__first__':
- # Ignore __first__ references to the same app (#22325).
- continue
- self.graph.add_dependency(migration, key, parent, skip_validation=True)
-
- def add_external_dependencies(self, key, migration):
- for parent in migration.dependencies:
- # Skip internal dependencies
- if key[0] == parent[0]:
- continue
- parent = self.check_key(parent, key[0])
- if parent is not None:
- self.graph.add_dependency(migration, key, parent, skip_validation=True)
- for child in migration.run_before:
- child = self.check_key(child, key[0])
- if child is not None:
- self.graph.add_dependency(migration, child, key, skip_validation=True)
-
- def build_graph(self):
- """
- Build a migration dependency graph using both the disk and database.
- You'll need to rebuild the graph if you apply migrations. This isn't
- usually a problem as generally migration stuff runs in a one-shot process.
- """
- # Load disk data
- self.load_disk()
- # Load database data
- if self.connection is None:
- self.applied_migrations = set()
- else:
- recorder = MigrationRecorder(self.connection)
- self.applied_migrations = recorder.applied_migrations()
- # To start, populate the migration graph with nodes for ALL migrations
- # and their dependencies. Also make note of replacing migrations at this step.
- self.graph = MigrationGraph()
- self.replacements = {}
- for key, migration in self.disk_migrations.items():
- self.graph.add_node(key, migration)
- # Internal (aka same-app) dependencies.
- self.add_internal_dependencies(key, migration)
- # Replacing migrations.
- if migration.replaces:
- self.replacements[key] = migration
- # Add external dependencies now that the internal ones have been resolved.
- for key, migration in self.disk_migrations.items():
- self.add_external_dependencies(key, migration)
- # Carry out replacements where possible.
- for key, migration in self.replacements.items():
- # Get applied status of each of this migration's replacement targets.
- applied_statuses = [(target in self.applied_migrations) for target in migration.replaces]
- # Ensure the replacing migration is only marked as applied if all of
- # its replacement targets are.
- if all(applied_statuses):
- self.applied_migrations.add(key)
- else:
- self.applied_migrations.discard(key)
- # A replacing migration can be used if either all or none of its
- # replacement targets have been applied.
- if all(applied_statuses) or (not any(applied_statuses)):
- self.graph.remove_replaced_nodes(key, migration.replaces)
- else:
- # This replacing migration cannot be used because it is partially applied.
- # Remove it from the graph and remap dependencies to it (#25945).
- self.graph.remove_replacement_node(key, migration.replaces)
- # Ensure the graph is consistent.
- try:
- self.graph.validate_consistency()
- except NodeNotFoundError as exc:
- # Check if the missing node could have been replaced by any squash
- # migration but wasn't because the squash migration was partially
- # applied before. In that case raise a more understandable exception
- # (#23556).
- # Get reverse replacements.
- reverse_replacements = {}
- for key, migration in self.replacements.items():
- for replaced in migration.replaces:
- reverse_replacements.setdefault(replaced, set()).add(key)
- # Try to reraise exception with more detail.
- if exc.node in reverse_replacements:
- candidates = reverse_replacements.get(exc.node, set())
- is_replaced = any(candidate in self.graph.nodes for candidate in candidates)
- if not is_replaced:
- tries = ', '.join('%s.%s' % c for c in candidates)
- raise NodeNotFoundError(
- "Migration {0} depends on nonexistent node ('{1}', '{2}'). "
- "Django tried to replace migration {1}.{2} with any of [{3}] "
- "but wasn't able to because some of the replaced migrations "
- "are already applied.".format(
- exc.origin, exc.node[0], exc.node[1], tries
- ),
- exc.node
- ) from exc
- raise exc
-
- def check_consistent_history(self, connection):
- """
- Raise InconsistentMigrationHistory if any applied migrations have
- unapplied dependencies.
- """
- recorder = MigrationRecorder(connection)
- applied = recorder.applied_migrations()
- for migration in applied:
- # If the migration is unknown, skip it.
- if migration not in self.graph.nodes:
- continue
- for parent in self.graph.node_map[migration].parents:
- if parent not in applied:
- # Skip unapplied squashed migrations that have all of their
- # `replaces` applied.
- if parent in self.replacements:
- if all(m in applied for m in self.replacements[parent].replaces):
- continue
- raise InconsistentMigrationHistory(
- "Migration {}.{} is applied before its dependency "
- "{}.{} on database '{}'.".format(
- migration[0], migration[1], parent[0], parent[1],
- connection.alias,
- )
- )
-
- def detect_conflicts(self):
- """
- Look through the loaded graph and detect any conflicts - apps
- with more than one leaf migration. Return a dict of the app labels
- that conflict with the migration names that conflict.
- """
- seen_apps = {}
- conflicting_apps = set()
- for app_label, migration_name in self.graph.leaf_nodes():
- if app_label in seen_apps:
- conflicting_apps.add(app_label)
- seen_apps.setdefault(app_label, set()).add(migration_name)
- return {app_label: seen_apps[app_label] for app_label in conflicting_apps}
-
- def project_state(self, nodes=None, at_end=True):
- """
- Return a ProjectState object representing the most recent state
- that the loaded migrations represent.
-
- See graph.make_state() for the meaning of "nodes" and "at_end".
- """
- return self.graph.make_state(nodes=nodes, at_end=at_end, real_apps=list(self.unmigrated_apps))
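A minimal sketch of the loader above reconciling disk and database state, again assuming the default `connection`; `disk_migrations` is keyed by (app_label, name) and `applied_migrations` holds the recorded keys, so their difference is the set of pending migrations:

    from django.db import connection
    from django.db.migrations.loader import MigrationLoader

    loader = MigrationLoader(connection)   # load_disk() and build_graph() run on init
    pending = set(loader.disk_migrations) - loader.applied_migrations
    for app_label, name in sorted(pending):
        print("unapplied: %s.%s" % (app_label, name))
    # Apps with more than one leaf migration show up as {app_label: {migration names}}.
    print(loader.detect_conflicts())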
diff --git a/thesisenv/lib/python3.6/site-packages/django/db/migrations/migration.py b/thesisenv/lib/python3.6/site-packages/django/db/migrations/migration.py
deleted file mode 100644
index ffe0b1f..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/db/migrations/migration.py
+++ /dev/null
@@ -1,191 +0,0 @@
-from django.db.transaction import atomic
-
-from .exceptions import IrreversibleError
-
-
-class Migration:
- """
- The base class for all migrations.
-
- Migration files will import this from django.db.migrations.Migration
- and subclass it as a class called Migration. It will have one or more
- of the following attributes:
-
- - operations: A list of Operation instances, probably from django.db.migrations.operations
- - dependencies: A list of tuples of (app_path, migration_name)
- - run_before: A list of tuples of (app_path, migration_name)
- - replaces: A list of migration_names
-
- Note that all migrations come out of migrations and into the Loader or
- Graph as instances, having been initialized with their app label and name.
- """
-
- # Operations to apply during this migration, in order.
- operations = []
-
- # Other migrations that should be run before this migration.
- # Should be a list of (app, migration_name).
- dependencies = []
-
- # Other migrations that should be run after this one (i.e. have
- # this migration added to their dependencies). Useful to make third-party
- # apps' migrations run after your AUTH_USER replacement, for example.
- run_before = []
-
- # Migration names in this app that this migration replaces. If this is
- # non-empty, this migration will only be applied if all these migrations
- # are not applied.
- replaces = []
-
- # Is this an initial migration? Initial migrations are skipped on
- # --fake-initial if the table or fields already exist. If None, the
- # migration is treated as initial when it has no dependencies, which is
- # what decides whether database introspection is needed. If True, always
- # perform introspection. If False, never perform introspection.
- initial = None
-
- # Whether to wrap the whole migration in a transaction. Only has an effect
- # on database backends which support transactional DDL.
- atomic = True
-
- def __init__(self, name, app_label):
- self.name = name
- self.app_label = app_label
- # Copy dependencies & other attrs as we might mutate them at runtime
- self.operations = list(self.__class__.operations)
- self.dependencies = list(self.__class__.dependencies)
- self.run_before = list(self.__class__.run_before)
- self.replaces = list(self.__class__.replaces)
-
- def __eq__(self, other):
- if not isinstance(other, Migration):
- return False
- return (self.name == other.name) and (self.app_label == other.app_label)
-
- def __repr__(self):
- return "<Migration %s.%s>" % (self.app_label, self.name)
-
- def __str__(self):
- return "%s.%s" % (self.app_label, self.name)
-
- def __hash__(self):
- return hash("%s.%s" % (self.app_label, self.name))
-
- def mutate_state(self, project_state, preserve=True):
- """
- Take a ProjectState and return a new one with the migration's
- operations applied to it. Preserve the original object state by
- default and return a mutated state from a copy.
- """
- new_state = project_state
- if preserve:
- new_state = project_state.clone()
-
- for operation in self.operations:
- operation.state_forwards(self.app_label, new_state)
- return new_state
-
- def apply(self, project_state, schema_editor, collect_sql=False):
- """
- Take a project_state representing all migrations prior to this one
- and a schema_editor for a live database and apply the migration
- in a forwards order.
-
- Return the resulting project state for efficient reuse by following
- Migrations.
- """
- for operation in self.operations:
- # If this operation cannot be represented as SQL, place a comment
- # there instead
- if collect_sql:
- schema_editor.collected_sql.append("--")
- if not operation.reduces_to_sql:
- schema_editor.collected_sql.append(
- "-- MIGRATION NOW PERFORMS OPERATION THAT CANNOT BE WRITTEN AS SQL:"
- )
- schema_editor.collected_sql.append("-- %s" % operation.describe())
- schema_editor.collected_sql.append("--")
- if not operation.reduces_to_sql:
- continue
- # Save the state before the operation has run
- old_state = project_state.clone()
- operation.state_forwards(self.app_label, project_state)
- # Run the operation
- atomic_operation = operation.atomic or (self.atomic and operation.atomic is not False)
- if not schema_editor.atomic_migration and atomic_operation:
- # Force a transaction on a non-transactional-DDL backend or an
- # atomic operation inside a non-atomic migration.
- with atomic(schema_editor.connection.alias):
- operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
- else:
- # Normal behaviour
- operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
- return project_state
-
- def unapply(self, project_state, schema_editor, collect_sql=False):
- """
- Take a project_state representing all migrations prior to this one
- and a schema_editor for a live database and apply the migration
- in a reverse order.
-
- The backwards migration process consists of two phases:
-
- 1. The intermediate states from right before the first until right
- after the last operation inside this migration are preserved.
- 2. The operations are applied in reverse order using the states
- recorded in step 1.
- """
- # Construct all the intermediate states we need for a reverse migration
- to_run = []
- new_state = project_state
- # Phase 1
- for operation in self.operations:
- # If it's irreversible, error out
- if not operation.reversible:
- raise IrreversibleError("Operation %s in %s is not reversible" % (operation, self))
- # Clone the state from the previous iteration so the same state object
- # is not mutated across all operations
- new_state = new_state.clone()
- old_state = new_state.clone()
- operation.state_forwards(self.app_label, new_state)
- to_run.insert(0, (operation, old_state, new_state))
-
- # Phase 2
- for operation, to_state, from_state in to_run:
- if collect_sql:
- schema_editor.collected_sql.append("--")
- if not operation.reduces_to_sql:
- schema_editor.collected_sql.append(
- "-- MIGRATION NOW PERFORMS OPERATION THAT CANNOT BE WRITTEN AS SQL:"
- )
- schema_editor.collected_sql.append("-- %s" % operation.describe())
- schema_editor.collected_sql.append("--")
- if not operation.reduces_to_sql:
- continue
- atomic_operation = operation.atomic or (self.atomic and operation.atomic is not False)
- if not schema_editor.atomic_migration and atomic_operation:
- # Force a transaction on a non-transactional-DDL backend or an
- # atomic operation inside a non-atomic migration.
- with atomic(schema_editor.connection.alias):
- operation.database_backwards(self.app_label, schema_editor, from_state, to_state)
- else:
- # Normal behaviour
- operation.database_backwards(self.app_label, schema_editor, from_state, to_state)
- return project_state
-
-
-class SwappableTuple(tuple):
- """
- Subclass of tuple so Django can tell this was originally a swappable
- dependency when it reads the migration file.
- """
-
- def __new__(cls, value, setting):
- self = tuple.__new__(cls, value)
- self.setting = setting
- return self
-
-
-def swappable_dependency(value):
- """Turn a setting value into a dependency."""
- return SwappableTuple((value.split(".", 1)[0], "__first__"), value)
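
The SwappableTuple/swappable_dependency() helpers above turn a setting value such as AUTH_USER_MODEL into a graph dependency on the first migration of the app named in that setting. A short usage sketch, assuming Django is importable; "auth.User" is only the default value and is used here purely for illustration:

from django.db.migrations import swappable_dependency

dep = swappable_dependency("auth.User")   # value as found in AUTH_USER_MODEL
print(dep)          # ('auth', '__first__')
print(dep.setting)  # 'auth.User'
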
diff --git a/thesisenv/lib/python3.6/site-packages/django/db/migrations/operations/base.py b/thesisenv/lib/python3.6/site-packages/django/db/migrations/operations/base.py
deleted file mode 100644
index 6ecbfac..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/db/migrations/operations/base.py
+++ /dev/null
@@ -1,132 +0,0 @@
-from django.db import router
-
-
-class Operation:
- """
- Base class for migration operations.
-
- It's responsible for both mutating the in-memory model state
- (see db/migrations/state.py) to represent what it performs, as well
- as actually performing it against a live database.
-
- Note that some operations won't modify memory state at all (e.g. data
- copying operations), and some will need their modifications to be
- optionally specified by the user (e.g. custom Python code snippets)
-
- Due to the way this class deals with deconstruction, it should be
- considered immutable.
- """
-
- # If this migration can be run in reverse.
- # Some operations are impossible to reverse, like deleting data.
- reversible = True
-
- # Can this migration be represented as SQL? (things like RunPython cannot)
- reduces_to_sql = True
-
- # Should this operation be forced as atomic even on backends with no
- # DDL transaction support (i.e., does it have no DDL, like RunPython)
- atomic = False
-
- # Should this operation be considered safe to elide and optimize across?
- elidable = False
-
- serialization_expand_args = []
-
- def __new__(cls, *args, **kwargs):
- # We capture the arguments to make returning them trivial
- self = object.__new__(cls)
- self._constructor_args = (args, kwargs)
- return self
-
- def deconstruct(self):
- """
- Return a 3-tuple of class import path (or just name if it lives
- under django.db.migrations), positional arguments, and keyword
- arguments.
- """
- return (
- self.__class__.__name__,
- self._constructor_args[0],
- self._constructor_args[1],
- )
-
- def state_forwards(self, app_label, state):
- """
- Take the state from the previous migration, and mutate it
- so that it matches what this migration would perform.
- """
- raise NotImplementedError('subclasses of Operation must provide a state_forwards() method')
-
- def database_forwards(self, app_label, schema_editor, from_state, to_state):
- """
- Perform the mutation on the database schema in the normal
- (forwards) direction.
- """
- raise NotImplementedError('subclasses of Operation must provide a database_forwards() method')
-
- def database_backwards(self, app_label, schema_editor, from_state, to_state):
- """
- Perform the mutation on the database schema in the reverse
- direction - e.g. if this were CreateModel, it would in fact
- drop the model's table.
- """
- raise NotImplementedError('subclasses of Operation must provide a database_backwards() method')
-
- def describe(self):
- """
- Output a brief summary of what the action does.
- """
- return "%s: %s" % (self.__class__.__name__, self._constructor_args)
-
- def references_model(self, name, app_label=None):
- """
- Return True if there is a chance this operation references the given
- model name (as a string), with an optional app label for accuracy.
-
- Used for optimization. If in doubt, return True;
- returning a false positive will merely make the optimizer a little
- less efficient, while returning a false negative may result in an
- unusable optimized migration.
- """
- return True
-
- def references_field(self, model_name, name, app_label=None):
- """
- Return True if there is a chance this operation references the given
- field name, with an optional app label for accuracy.
-
- Used for optimization. If in doubt, return True.
- """
- return self.references_model(model_name, app_label)
-
- def allow_migrate_model(self, connection_alias, model):
- """
- Return whether or not a model may be migrated.
-
- This is a thin wrapper around router.allow_migrate_model() that
- preemptively rejects any proxy, swapped out, or unmanaged model.
- """
- if not model._meta.can_migrate(connection_alias):
- return False
-
- return router.allow_migrate_model(connection_alias, model)
-
- def reduce(self, operation, in_between, app_label=None):
- """
- Return either a list of operations the actual operation should be
- replaced with or a boolean that indicates whether or not the specified
- operation can be optimized across.
- """
- if self.elidable:
- return [operation]
- elif operation.elidable:
- return [self]
- return False
-
- def __repr__(self):
- return "<%s %s%s>" % (
- self.__class__.__name__,
- ", ".join(map(repr, self._constructor_args[0])),
- ",".join(" %s=%r" % x for x in self._constructor_args[1].items()),
- )
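
The Operation base class above spells out the contract custom operations must satisfy: mutate the in-memory state in state_forwards() and touch the database in database_forwards()/database_backwards(). A minimal sketch of a database-only custom operation; the class name and the ANALYZE statement are made up for illustration and are not part of Django:

from django.db.migrations.operations.base import Operation


class AnalyzeTable(Operation):
    """Hypothetical maintenance operation: run ANALYZE on one model's table."""

    reversible = True       # reversing is simply a no-op here
    reduces_to_sql = True

    def __init__(self, model_name):
        self.model_name = model_name

    def state_forwards(self, app_label, state):
        # Purely a database-side command: no model-state change to record.
        pass

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        model = to_state.apps.get_model(app_label, self.model_name)
        table = schema_editor.quote_name(model._meta.db_table)
        schema_editor.execute("ANALYZE %s" % table)

    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        pass  # nothing to undo

    def describe(self):
        return "Run ANALYZE on %s" % self.model_name
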
diff --git a/thesisenv/lib/python3.6/site-packages/django/db/migrations/operations/fields.py b/thesisenv/lib/python3.6/site-packages/django/db/migrations/operations/fields.py
deleted file mode 100644
index d103e5c..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/db/migrations/operations/fields.py
+++ /dev/null
@@ -1,342 +0,0 @@
-from django.core.exceptions import FieldDoesNotExist
-from django.db.models.fields import NOT_PROVIDED
-from django.utils.functional import cached_property
-
-from .base import Operation
-from .utils import is_referenced_by_foreign_key
-
-
-class FieldOperation(Operation):
- def __init__(self, model_name, name):
- self.model_name = model_name
- self.name = name
-
- @cached_property
- def model_name_lower(self):
- return self.model_name.lower()
-
- @cached_property
- def name_lower(self):
- return self.name.lower()
-
- def is_same_model_operation(self, operation):
- return self.model_name_lower == operation.model_name_lower
-
- def is_same_field_operation(self, operation):
- return self.is_same_model_operation(operation) and self.name_lower == operation.name_lower
-
- def references_model(self, name, app_label=None):
- return name.lower() == self.model_name_lower
-
- def references_field(self, model_name, name, app_label=None):
- return self.references_model(model_name) and name.lower() == self.name_lower
-
- def reduce(self, operation, in_between, app_label=None):
- return (
- super().reduce(operation, in_between, app_label=app_label) or
- not operation.references_field(self.model_name, self.name, app_label)
- )
-
-
-class AddField(FieldOperation):
- """Add a field to a model."""
-
- def __init__(self, model_name, name, field, preserve_default=True):
- self.field = field
- self.preserve_default = preserve_default
- super().__init__(model_name, name)
-
- def deconstruct(self):
- kwargs = {
- 'model_name': self.model_name,
- 'name': self.name,
- 'field': self.field,
- }
- if self.preserve_default is not True:
- kwargs['preserve_default'] = self.preserve_default
- return (
- self.__class__.__name__,
- [],
- kwargs
- )
-
- def state_forwards(self, app_label, state):
- # If preserve default is off, don't use the default for future state
- if not self.preserve_default:
- field = self.field.clone()
- field.default = NOT_PROVIDED
- else:
- field = self.field
- state.models[app_label, self.model_name_lower].fields.append((self.name, field))
- # Delay rendering of relationships if it's not a relational field
- delay = not field.is_relation
- state.reload_model(app_label, self.model_name_lower, delay=delay)
-
- def database_forwards(self, app_label, schema_editor, from_state, to_state):
- to_model = to_state.apps.get_model(app_label, self.model_name)
- if self.allow_migrate_model(schema_editor.connection.alias, to_model):
- from_model = from_state.apps.get_model(app_label, self.model_name)
- field = to_model._meta.get_field(self.name)
- if not self.preserve_default:
- field.default = self.field.default
- schema_editor.add_field(
- from_model,
- field,
- )
- if not self.preserve_default:
- field.default = NOT_PROVIDED
-
- def database_backwards(self, app_label, schema_editor, from_state, to_state):
- from_model = from_state.apps.get_model(app_label, self.model_name)
- if self.allow_migrate_model(schema_editor.connection.alias, from_model):
- schema_editor.remove_field(from_model, from_model._meta.get_field(self.name))
-
- def describe(self):
- return "Add field %s to %s" % (self.name, self.model_name)
-
- def reduce(self, operation, in_between, app_label=None):
- if isinstance(operation, FieldOperation) and self.is_same_field_operation(operation):
- if isinstance(operation, AlterField):
- return [
- AddField(
- model_name=self.model_name,
- name=operation.name,
- field=operation.field,
- ),
- ]
- elif isinstance(operation, RemoveField):
- return []
- elif isinstance(operation, RenameField):
- return [
- AddField(
- model_name=self.model_name,
- name=operation.new_name,
- field=self.field,
- ),
- ]
- return super().reduce(operation, in_between, app_label=app_label)
-
-
-class RemoveField(FieldOperation):
- """Remove a field from a model."""
-
- def deconstruct(self):
- kwargs = {
- 'model_name': self.model_name,
- 'name': self.name,
- }
- return (
- self.__class__.__name__,
- [],
- kwargs
- )
-
- def state_forwards(self, app_label, state):
- new_fields = []
- old_field = None
- for name, instance in state.models[app_label, self.model_name_lower].fields:
- if name != self.name:
- new_fields.append((name, instance))
- else:
- old_field = instance
- state.models[app_label, self.model_name_lower].fields = new_fields
- # Delay rendering of relationships if it's not a relational field
- delay = not old_field.is_relation
- state.reload_model(app_label, self.model_name_lower, delay=delay)
-
- def database_forwards(self, app_label, schema_editor, from_state, to_state):
- from_model = from_state.apps.get_model(app_label, self.model_name)
- if self.allow_migrate_model(schema_editor.connection.alias, from_model):
- schema_editor.remove_field(from_model, from_model._meta.get_field(self.name))
-
- def database_backwards(self, app_label, schema_editor, from_state, to_state):
- to_model = to_state.apps.get_model(app_label, self.model_name)
- if self.allow_migrate_model(schema_editor.connection.alias, to_model):
- from_model = from_state.apps.get_model(app_label, self.model_name)
- schema_editor.add_field(from_model, to_model._meta.get_field(self.name))
-
- def describe(self):
- return "Remove field %s from %s" % (self.name, self.model_name)
-
-
-class AlterField(FieldOperation):
- """
- Alter a field's database column (e.g. null, max_length) to the provided
- new field.
- """
-
- def __init__(self, model_name, name, field, preserve_default=True):
- self.field = field
- self.preserve_default = preserve_default
- super().__init__(model_name, name)
-
- def deconstruct(self):
- kwargs = {
- 'model_name': self.model_name,
- 'name': self.name,
- 'field': self.field,
- }
- if self.preserve_default is not True:
- kwargs['preserve_default'] = self.preserve_default
- return (
- self.__class__.__name__,
- [],
- kwargs
- )
-
- def state_forwards(self, app_label, state):
- if not self.preserve_default:
- field = self.field.clone()
- field.default = NOT_PROVIDED
- else:
- field = self.field
- state.models[app_label, self.model_name_lower].fields = [
- (n, field if n == self.name else f)
- for n, f in
- state.models[app_label, self.model_name_lower].fields
- ]
- # TODO: investigate if old relational fields must be reloaded or if it's
- # sufficient if the new field is (#27737).
- # Delay rendering of relationships if it's not a relational field and
- # not referenced by a foreign key.
- delay = (
- not field.is_relation and
- not is_referenced_by_foreign_key(state, self.model_name_lower, self.field, self.name)
- )
- state.reload_model(app_label, self.model_name_lower, delay=delay)
-
- def database_forwards(self, app_label, schema_editor, from_state, to_state):
- to_model = to_state.apps.get_model(app_label, self.model_name)
- if self.allow_migrate_model(schema_editor.connection.alias, to_model):
- from_model = from_state.apps.get_model(app_label, self.model_name)
- from_field = from_model._meta.get_field(self.name)
- to_field = to_model._meta.get_field(self.name)
- if not self.preserve_default:
- to_field.default = self.field.default
- schema_editor.alter_field(from_model, from_field, to_field)
- if not self.preserve_default:
- to_field.default = NOT_PROVIDED
-
- def database_backwards(self, app_label, schema_editor, from_state, to_state):
- self.database_forwards(app_label, schema_editor, from_state, to_state)
-
- def describe(self):
- return "Alter field %s on %s" % (self.name, self.model_name)
-
- def reduce(self, operation, in_between, app_label=None):
- if isinstance(operation, RemoveField) and self.is_same_field_operation(operation):
- return [operation]
- elif isinstance(operation, RenameField) and self.is_same_field_operation(operation):
- return [
- operation,
- AlterField(
- model_name=self.model_name,
- name=operation.new_name,
- field=self.field,
- ),
- ]
- return super().reduce(operation, in_between, app_label=app_label)
-
-
-class RenameField(FieldOperation):
- """Rename a field on the model. Might affect db_column too."""
-
- def __init__(self, model_name, old_name, new_name):
- self.old_name = old_name
- self.new_name = new_name
- super().__init__(model_name, old_name)
-
- @cached_property
- def old_name_lower(self):
- return self.old_name.lower()
-
- @cached_property
- def new_name_lower(self):
- return self.new_name.lower()
-
- def deconstruct(self):
- kwargs = {
- 'model_name': self.model_name,
- 'old_name': self.old_name,
- 'new_name': self.new_name,
- }
- return (
- self.__class__.__name__,
- [],
- kwargs
- )
-
- def state_forwards(self, app_label, state):
- model_state = state.models[app_label, self.model_name_lower]
- # Rename the field
- fields = model_state.fields
- for index, (name, field) in enumerate(fields):
- if name == self.old_name:
- fields[index] = (self.new_name, field)
- # Delay rendering of relationships if it's not a relational
- # field and not referenced by a foreign key.
- delay = (
- not field.is_relation and
- not is_referenced_by_foreign_key(state, self.model_name_lower, field, self.name)
- )
- break
- else:
- raise FieldDoesNotExist(
- "%s.%s has no field named '%s'" % (app_label, self.model_name, self.old_name)
- )
- # Fix index/unique_together to refer to the new field
- options = model_state.options
- for option in ('index_together', 'unique_together'):
- if option in options:
- options[option] = [
- [self.new_name if n == self.old_name else n for n in together]
- for together in options[option]
- ]
- state.reload_model(app_label, self.model_name_lower, delay=delay)
-
- def database_forwards(self, app_label, schema_editor, from_state, to_state):
- to_model = to_state.apps.get_model(app_label, self.model_name)
- if self.allow_migrate_model(schema_editor.connection.alias, to_model):
- from_model = from_state.apps.get_model(app_label, self.model_name)
- schema_editor.alter_field(
- from_model,
- from_model._meta.get_field(self.old_name),
- to_model._meta.get_field(self.new_name),
- )
-
- def database_backwards(self, app_label, schema_editor, from_state, to_state):
- to_model = to_state.apps.get_model(app_label, self.model_name)
- if self.allow_migrate_model(schema_editor.connection.alias, to_model):
- from_model = from_state.apps.get_model(app_label, self.model_name)
- schema_editor.alter_field(
- from_model,
- from_model._meta.get_field(self.new_name),
- to_model._meta.get_field(self.old_name),
- )
-
- def describe(self):
- return "Rename field %s on %s to %s" % (self.old_name, self.model_name, self.new_name)
-
- def references_field(self, model_name, name, app_label=None):
- return self.references_model(model_name) and (
- name.lower() == self.old_name_lower or
- name.lower() == self.new_name_lower
- )
-
- def reduce(self, operation, in_between, app_label=None):
- if (isinstance(operation, RenameField) and
- self.is_same_model_operation(operation) and
- self.new_name_lower == operation.old_name_lower):
- return [
- RenameField(
- self.model_name,
- self.old_name,
- operation.new_name,
- ),
- ]
- # Skip `FieldOperation.reduce` as we want to run `references_field`
- # against self.new_name.
- return (
- super(FieldOperation, self).reduce(operation, in_between, app_label=app_label) or
- not operation.references_field(self.model_name, self.new_name, app_label)
- )
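
RenameField.reduce() above is what lets the optimizer collapse a chain of renames into a single operation. A small sketch, assuming Django is importable; the model and field names are hypothetical:

from django.db import migrations
from django.db.migrations.optimizer import MigrationOptimizer

ops = [
    migrations.RenameField("post", "text", "body"),
    migrations.RenameField("post", "body", "content"),
]
optimized = MigrationOptimizer().optimize(ops, app_label="application")
print([op.describe() for op in optimized])
# -> ['Rename field text on post to content']
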
diff --git a/thesisenv/lib/python3.6/site-packages/django/db/migrations/operations/models.py b/thesisenv/lib/python3.6/site-packages/django/db/migrations/operations/models.py
deleted file mode 100644
index 9754181..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/db/migrations/operations/models.py
+++ /dev/null
@@ -1,836 +0,0 @@
-from django.db import models
-from django.db.migrations.operations.base import Operation
-from django.db.migrations.state import ModelState
-from django.db.models.fields.related import RECURSIVE_RELATIONSHIP_CONSTANT
-from django.db.models.options import normalize_together
-from django.utils.functional import cached_property
-
-from .fields import (
- AddField, AlterField, FieldOperation, RemoveField, RenameField,
-)
-
-
-def _check_for_duplicates(arg_name, objs):
- used_vals = set()
- for val in objs:
- if val in used_vals:
- raise ValueError(
- "Found duplicate value %s in CreateModel %s argument." % (val, arg_name)
- )
- used_vals.add(val)
-
-
-class ModelOperation(Operation):
- def __init__(self, name):
- self.name = name
-
- @cached_property
- def name_lower(self):
- return self.name.lower()
-
- def references_model(self, name, app_label=None):
- return name.lower() == self.name_lower
-
- def reduce(self, operation, in_between, app_label=None):
- return (
- super().reduce(operation, in_between, app_label=app_label) or
- not operation.references_model(self.name, app_label)
- )
-
-
-class CreateModel(ModelOperation):
- """Create a model's table."""
-
- serialization_expand_args = ['fields', 'options', 'managers']
-
- def __init__(self, name, fields, options=None, bases=None, managers=None):
- self.fields = fields
- self.options = options or {}
- self.bases = bases or (models.Model,)
- self.managers = managers or []
- super().__init__(name)
- # Sanity-check that there are no duplicated field names, bases, or
- # manager names
- _check_for_duplicates('fields', (name for name, _ in self.fields))
- _check_for_duplicates('bases', (
- base._meta.label_lower if hasattr(base, '_meta') else
- base.lower() if isinstance(base, str) else base
- for base in self.bases
- ))
- _check_for_duplicates('managers', (name for name, _ in self.managers))
-
- def deconstruct(self):
- kwargs = {
- 'name': self.name,
- 'fields': self.fields,
- }
- if self.options:
- kwargs['options'] = self.options
- if self.bases and self.bases != (models.Model,):
- kwargs['bases'] = self.bases
- if self.managers and self.managers != [('objects', models.Manager())]:
- kwargs['managers'] = self.managers
- return (
- self.__class__.__qualname__,
- [],
- kwargs
- )
-
- def state_forwards(self, app_label, state):
- state.add_model(ModelState(
- app_label,
- self.name,
- list(self.fields),
- dict(self.options),
- tuple(self.bases),
- list(self.managers),
- ))
-
- def database_forwards(self, app_label, schema_editor, from_state, to_state):
- model = to_state.apps.get_model(app_label, self.name)
- if self.allow_migrate_model(schema_editor.connection.alias, model):
- schema_editor.create_model(model)
-
- def database_backwards(self, app_label, schema_editor, from_state, to_state):
- model = from_state.apps.get_model(app_label, self.name)
- if self.allow_migrate_model(schema_editor.connection.alias, model):
- schema_editor.delete_model(model)
-
- def describe(self):
- return "Create %smodel %s" % ("proxy " if self.options.get("proxy", False) else "", self.name)
-
- def references_model(self, name, app_label=None):
- name_lower = name.lower()
- if name_lower == self.name_lower:
- return True
-
- # Check we didn't inherit from the model
- models_to_check = [
- base for base in self.bases
- if base is not models.Model and isinstance(base, (models.base.ModelBase, str))
- ]
- # Check we have no FKs/M2Ms with it
- for fname, field in self.fields:
- if field.remote_field:
- models_to_check.append(field.remote_field.model)
- # Now go over all the models and check against them
- for model in models_to_check:
- model_app_label, model_name = self.model_to_key(model)
- if model_name.lower() == name_lower:
- if app_label is None or not model_app_label or model_app_label == app_label:
- return True
- return False
-
- def model_to_key(self, model):
- """
- Take either a model class or an "app_label.ModelName" string
- and return (app_label, object_name).
- """
- if isinstance(model, str):
- return model.split(".", 1)
- else:
- return model._meta.app_label, model._meta.object_name
-
- def reduce(self, operation, in_between, app_label=None):
- if (isinstance(operation, DeleteModel) and
- self.name_lower == operation.name_lower and
- not self.options.get("proxy", False)):
- return []
- elif isinstance(operation, RenameModel) and self.name_lower == operation.old_name_lower:
- return [
- CreateModel(
- operation.new_name,
- fields=self.fields,
- options=self.options,
- bases=self.bases,
- managers=self.managers,
- ),
- ]
- elif isinstance(operation, AlterModelOptions) and self.name_lower == operation.name_lower:
- new_options = self.options.copy()
- new_options.update(operation.options)
- return [
- CreateModel(
- self.name,
- fields=self.fields,
- options=new_options,
- bases=self.bases,
- managers=self.managers,
- ),
- ]
- elif isinstance(operation, FieldOperation) and self.name_lower == operation.model_name_lower:
- if isinstance(operation, AddField):
- # Don't allow optimizations of FKs through models they reference
- if hasattr(operation.field, "remote_field") and operation.field.remote_field:
- for between in in_between:
- # Check that it doesn't point to the model
- app_label, object_name = self.model_to_key(operation.field.remote_field.model)
- if between.references_model(object_name, app_label):
- return False
- # Check that it's not through the model
- if getattr(operation.field.remote_field, "through", None):
- app_label, object_name = self.model_to_key(operation.field.remote_field.through)
- if between.references_model(object_name, app_label):
- return False
- return [
- CreateModel(
- self.name,
- fields=self.fields + [(operation.name, operation.field)],
- options=self.options,
- bases=self.bases,
- managers=self.managers,
- ),
- ]
- elif isinstance(operation, AlterField):
- return [
- CreateModel(
- self.name,
- fields=[
- (n, operation.field if n == operation.name else v)
- for n, v in self.fields
- ],
- options=self.options,
- bases=self.bases,
- managers=self.managers,
- ),
- ]
- elif isinstance(operation, RemoveField):
- return [
- CreateModel(
- self.name,
- fields=[
- (n, v)
- for n, v in self.fields
- if n.lower() != operation.name_lower
- ],
- options=self.options,
- bases=self.bases,
- managers=self.managers,
- ),
- ]
- elif isinstance(operation, RenameField):
- return [
- CreateModel(
- self.name,
- fields=[
- (operation.new_name if n == operation.old_name else n, v)
- for n, v in self.fields
- ],
- options=self.options,
- bases=self.bases,
- managers=self.managers,
- ),
- ]
- return super().reduce(operation, in_between, app_label=app_label)
-
-
-class DeleteModel(ModelOperation):
- """Drop a model's table."""
-
- def deconstruct(self):
- kwargs = {
- 'name': self.name,
- }
- return (
- self.__class__.__qualname__,
- [],
- kwargs
- )
-
- def state_forwards(self, app_label, state):
- state.remove_model(app_label, self.name_lower)
-
- def database_forwards(self, app_label, schema_editor, from_state, to_state):
- model = from_state.apps.get_model(app_label, self.name)
- if self.allow_migrate_model(schema_editor.connection.alias, model):
- schema_editor.delete_model(model)
-
- def database_backwards(self, app_label, schema_editor, from_state, to_state):
- model = to_state.apps.get_model(app_label, self.name)
- if self.allow_migrate_model(schema_editor.connection.alias, model):
- schema_editor.create_model(model)
-
- def describe(self):
- return "Delete model %s" % (self.name, )
-
-
-class RenameModel(ModelOperation):
- """Rename a model."""
-
- def __init__(self, old_name, new_name):
- self.old_name = old_name
- self.new_name = new_name
- super().__init__(old_name)
-
- @cached_property
- def old_name_lower(self):
- return self.old_name.lower()
-
- @cached_property
- def new_name_lower(self):
- return self.new_name.lower()
-
- def deconstruct(self):
- kwargs = {
- 'old_name': self.old_name,
- 'new_name': self.new_name,
- }
- return (
- self.__class__.__qualname__,
- [],
- kwargs
- )
-
- def _get_model_tuple(self, remote_model, app_label, model_name):
- if remote_model == RECURSIVE_RELATIONSHIP_CONSTANT:
- return app_label, model_name.lower()
- elif '.' in remote_model:
- return tuple(remote_model.lower().split('.'))
- else:
- return app_label, remote_model.lower()
-
- def state_forwards(self, app_label, state):
- # Add a new model.
- renamed_model = state.models[app_label, self.old_name_lower].clone()
- renamed_model.name = self.new_name
- state.models[app_label, self.new_name_lower] = renamed_model
- # Repoint all fields pointing to the old model to the new one.
- old_model_tuple = app_label, self.old_name_lower
- new_remote_model = '%s.%s' % (app_label, self.new_name)
- to_reload = []
- for (model_app_label, model_name), model_state in state.models.items():
- model_changed = False
- for index, (name, field) in enumerate(model_state.fields):
- changed_field = None
- remote_field = field.remote_field
- if remote_field:
- remote_model_tuple = self._get_model_tuple(
- remote_field.model, model_app_label, model_name
- )
- if remote_model_tuple == old_model_tuple:
- changed_field = field.clone()
- changed_field.remote_field.model = new_remote_model
- through_model = getattr(remote_field, 'through', None)
- if through_model:
- through_model_tuple = self._get_model_tuple(
- through_model, model_app_label, model_name
- )
- if through_model_tuple == old_model_tuple:
- if changed_field is None:
- changed_field = field.clone()
- changed_field.remote_field.through = new_remote_model
- if changed_field:
- model_state.fields[index] = name, changed_field
- model_changed = True
- if model_changed:
- to_reload.append((model_app_label, model_name))
- # Reload models related to old model before removing the old model.
- state.reload_models(to_reload, delay=True)
- # Remove the old model.
- state.remove_model(app_label, self.old_name_lower)
- state.reload_model(app_label, self.new_name_lower, delay=True)
-
- def database_forwards(self, app_label, schema_editor, from_state, to_state):
- new_model = to_state.apps.get_model(app_label, self.new_name)
- if self.allow_migrate_model(schema_editor.connection.alias, new_model):
- old_model = from_state.apps.get_model(app_label, self.old_name)
- # Move the main table
- schema_editor.alter_db_table(
- new_model,
- old_model._meta.db_table,
- new_model._meta.db_table,
- )
- # Alter the fields pointing to us
- for related_object in old_model._meta.related_objects:
- if related_object.related_model == old_model:
- model = new_model
- related_key = (app_label, self.new_name_lower)
- else:
- model = related_object.related_model
- related_key = (
- related_object.related_model._meta.app_label,
- related_object.related_model._meta.model_name,
- )
- to_field = to_state.apps.get_model(
- *related_key
- )._meta.get_field(related_object.field.name)
- schema_editor.alter_field(
- model,
- related_object.field,
- to_field,
- )
- # Rename M2M fields whose name is based on this model's name.
- fields = zip(old_model._meta.local_many_to_many, new_model._meta.local_many_to_many)
- for (old_field, new_field) in fields:
- # Skip self-referential fields as these are renamed above.
- if new_field.model == new_field.related_model or not new_field.remote_field.through._meta.auto_created:
- continue
- # Rename the M2M table that's based on this model's name.
- old_m2m_model = old_field.remote_field.through
- new_m2m_model = new_field.remote_field.through
- schema_editor.alter_db_table(
- new_m2m_model,
- old_m2m_model._meta.db_table,
- new_m2m_model._meta.db_table,
- )
- # Rename the column in the M2M table that's based on this
- # model's name.
- schema_editor.alter_field(
- new_m2m_model,
- old_m2m_model._meta.get_field(old_model._meta.model_name),
- new_m2m_model._meta.get_field(new_model._meta.model_name),
- )
-
- def database_backwards(self, app_label, schema_editor, from_state, to_state):
- self.new_name_lower, self.old_name_lower = self.old_name_lower, self.new_name_lower
- self.new_name, self.old_name = self.old_name, self.new_name
-
- self.database_forwards(app_label, schema_editor, from_state, to_state)
-
- self.new_name_lower, self.old_name_lower = self.old_name_lower, self.new_name_lower
- self.new_name, self.old_name = self.old_name, self.new_name
-
- def references_model(self, name, app_label=None):
- return (
- name.lower() == self.old_name_lower or
- name.lower() == self.new_name_lower
- )
-
- def describe(self):
- return "Rename model %s to %s" % (self.old_name, self.new_name)
-
- def reduce(self, operation, in_between, app_label=None):
- if (isinstance(operation, RenameModel) and
- self.new_name_lower == operation.old_name_lower):
- return [
- RenameModel(
- self.old_name,
- operation.new_name,
- ),
- ]
- # Skip `ModelOperation.reduce` as we want to run `references_model`
- # against self.new_name.
- return (
- super(ModelOperation, self).reduce(operation, in_between, app_label=app_label) or
- not operation.references_model(self.new_name, app_label)
- )
-
-
-class AlterModelTable(ModelOperation):
- """Rename a model's table."""
-
- def __init__(self, name, table):
- self.table = table
- super().__init__(name)
-
- def deconstruct(self):
- kwargs = {
- 'name': self.name,
- 'table': self.table,
- }
- return (
- self.__class__.__qualname__,
- [],
- kwargs
- )
-
- def state_forwards(self, app_label, state):
- state.models[app_label, self.name_lower].options["db_table"] = self.table
- state.reload_model(app_label, self.name_lower, delay=True)
-
- def database_forwards(self, app_label, schema_editor, from_state, to_state):
- new_model = to_state.apps.get_model(app_label, self.name)
- if self.allow_migrate_model(schema_editor.connection.alias, new_model):
- old_model = from_state.apps.get_model(app_label, self.name)
- schema_editor.alter_db_table(
- new_model,
- old_model._meta.db_table,
- new_model._meta.db_table,
- )
- # Rename M2M fields whose name is based on this model's db_table
- for (old_field, new_field) in zip(old_model._meta.local_many_to_many, new_model._meta.local_many_to_many):
- if new_field.remote_field.through._meta.auto_created:
- schema_editor.alter_db_table(
- new_field.remote_field.through,
- old_field.remote_field.through._meta.db_table,
- new_field.remote_field.through._meta.db_table,
- )
-
- def database_backwards(self, app_label, schema_editor, from_state, to_state):
- return self.database_forwards(app_label, schema_editor, from_state, to_state)
-
- def describe(self):
- return "Rename table for %s to %s" % (
- self.name,
- self.table if self.table is not None else "(default)"
- )
-
- def reduce(self, operation, in_between, app_label=None):
- if isinstance(operation, (AlterModelTable, DeleteModel)) and self.name_lower == operation.name_lower:
- return [operation]
- return super().reduce(operation, in_between, app_label=app_label)
-
-
-class ModelOptionOperation(ModelOperation):
- def reduce(self, operation, in_between, app_label=None):
- if isinstance(operation, (self.__class__, DeleteModel)) and self.name_lower == operation.name_lower:
- return [operation]
- return super().reduce(operation, in_between, app_label=app_label)
-
-
-class FieldRelatedOptionOperation(ModelOptionOperation):
- def reduce(self, operation, in_between, app_label=None):
- if (isinstance(operation, FieldOperation) and
- self.name_lower == operation.model_name_lower and
- not self.references_field(operation.model_name, operation.name)):
- return [operation, self]
- return super().reduce(operation, in_between, app_label=app_label)
-
-
-class AlterUniqueTogether(FieldRelatedOptionOperation):
- """
- Change the value of unique_together to the target one.
- Input value of unique_together must be a set of tuples.
- """
- option_name = "unique_together"
-
- def __init__(self, name, unique_together):
- unique_together = normalize_together(unique_together)
- self.unique_together = {tuple(cons) for cons in unique_together}
- super().__init__(name)
-
- def deconstruct(self):
- kwargs = {
- 'name': self.name,
- 'unique_together': self.unique_together,
- }
- return (
- self.__class__.__qualname__,
- [],
- kwargs
- )
-
- def state_forwards(self, app_label, state):
- model_state = state.models[app_label, self.name_lower]
- model_state.options[self.option_name] = self.unique_together
- state.reload_model(app_label, self.name_lower, delay=True)
-
- def database_forwards(self, app_label, schema_editor, from_state, to_state):
- new_model = to_state.apps.get_model(app_label, self.name)
- if self.allow_migrate_model(schema_editor.connection.alias, new_model):
- old_model = from_state.apps.get_model(app_label, self.name)
- schema_editor.alter_unique_together(
- new_model,
- getattr(old_model._meta, self.option_name, set()),
- getattr(new_model._meta, self.option_name, set()),
- )
-
- def database_backwards(self, app_label, schema_editor, from_state, to_state):
- return self.database_forwards(app_label, schema_editor, from_state, to_state)
-
- def references_field(self, model_name, name, app_label=None):
- return (
- self.references_model(model_name, app_label) and
- (
- not self.unique_together or
- any((name in together) for together in self.unique_together)
- )
- )
-
- def describe(self):
- return "Alter %s for %s (%s constraint(s))" % (self.option_name, self.name, len(self.unique_together or ''))
-
-
-class AlterIndexTogether(FieldRelatedOptionOperation):
- """
- Change the value of index_together to the target one.
- Input value of index_together must be a set of tuples.
- """
- option_name = "index_together"
-
- def __init__(self, name, index_together):
- index_together = normalize_together(index_together)
- self.index_together = {tuple(cons) for cons in index_together}
- super().__init__(name)
-
- def deconstruct(self):
- kwargs = {
- 'name': self.name,
- 'index_together': self.index_together,
- }
- return (
- self.__class__.__qualname__,
- [],
- kwargs
- )
-
- def state_forwards(self, app_label, state):
- model_state = state.models[app_label, self.name_lower]
- model_state.options[self.option_name] = self.index_together
- state.reload_model(app_label, self.name_lower, delay=True)
-
- def database_forwards(self, app_label, schema_editor, from_state, to_state):
- new_model = to_state.apps.get_model(app_label, self.name)
- if self.allow_migrate_model(schema_editor.connection.alias, new_model):
- old_model = from_state.apps.get_model(app_label, self.name)
- schema_editor.alter_index_together(
- new_model,
- getattr(old_model._meta, self.option_name, set()),
- getattr(new_model._meta, self.option_name, set()),
- )
-
- def database_backwards(self, app_label, schema_editor, from_state, to_state):
- return self.database_forwards(app_label, schema_editor, from_state, to_state)
-
- def references_field(self, model_name, name, app_label=None):
- return (
- self.references_model(model_name, app_label) and
- (
- not self.index_together or
- any((name in together) for together in self.index_together)
- )
- )
-
- def describe(self):
- return "Alter %s for %s (%s constraint(s))" % (self.option_name, self.name, len(self.index_together or ''))
-
-
-class AlterOrderWithRespectTo(FieldRelatedOptionOperation):
- """Represent a change with the order_with_respect_to option."""
-
- def __init__(self, name, order_with_respect_to):
- self.order_with_respect_to = order_with_respect_to
- super().__init__(name)
-
- def deconstruct(self):
- kwargs = {
- 'name': self.name,
- 'order_with_respect_to': self.order_with_respect_to,
- }
- return (
- self.__class__.__qualname__,
- [],
- kwargs
- )
-
- def state_forwards(self, app_label, state):
- model_state = state.models[app_label, self.name_lower]
- model_state.options['order_with_respect_to'] = self.order_with_respect_to
- state.reload_model(app_label, self.name_lower, delay=True)
-
- def database_forwards(self, app_label, schema_editor, from_state, to_state):
- to_model = to_state.apps.get_model(app_label, self.name)
- if self.allow_migrate_model(schema_editor.connection.alias, to_model):
- from_model = from_state.apps.get_model(app_label, self.name)
- # Remove a field if we need to
- if from_model._meta.order_with_respect_to and not to_model._meta.order_with_respect_to:
- schema_editor.remove_field(from_model, from_model._meta.get_field("_order"))
- # Add a field if we need to (altering the column is untouched as
- # it's likely a rename)
- elif to_model._meta.order_with_respect_to and not from_model._meta.order_with_respect_to:
- field = to_model._meta.get_field("_order")
- if not field.has_default():
- field.default = 0
- schema_editor.add_field(
- from_model,
- field,
- )
-
- def database_backwards(self, app_label, schema_editor, from_state, to_state):
- self.database_forwards(app_label, schema_editor, from_state, to_state)
-
- def references_field(self, model_name, name, app_label=None):
- return (
- self.references_model(model_name, app_label) and
- (
- self.order_with_respect_to is None or
- name == self.order_with_respect_to
- )
- )
-
- def describe(self):
- return "Set order_with_respect_to on %s to %s" % (self.name, self.order_with_respect_to)
-
-
-class AlterModelOptions(ModelOptionOperation):
- """
- Set new model options that don't directly affect the database schema
- (like verbose_name, permissions, ordering). Python code in migrations
- may still need them.
- """
-
- # Model options we want to compare and preserve in an AlterModelOptions op
- ALTER_OPTION_KEYS = [
- "base_manager_name",
- "default_manager_name",
- "get_latest_by",
- "managed",
- "ordering",
- "permissions",
- "default_permissions",
- "select_on_save",
- "verbose_name",
- "verbose_name_plural",
- ]
-
- def __init__(self, name, options):
- self.options = options
- super().__init__(name)
-
- def deconstruct(self):
- kwargs = {
- 'name': self.name,
- 'options': self.options,
- }
- return (
- self.__class__.__qualname__,
- [],
- kwargs
- )
-
- def state_forwards(self, app_label, state):
- model_state = state.models[app_label, self.name_lower]
- model_state.options = dict(model_state.options)
- model_state.options.update(self.options)
- for key in self.ALTER_OPTION_KEYS:
- if key not in self.options and key in model_state.options:
- del model_state.options[key]
- state.reload_model(app_label, self.name_lower, delay=True)
-
- def database_forwards(self, app_label, schema_editor, from_state, to_state):
- pass
-
- def database_backwards(self, app_label, schema_editor, from_state, to_state):
- pass
-
- def describe(self):
- return "Change Meta options on %s" % (self.name, )
-
-
-class AlterModelManagers(ModelOptionOperation):
- """Alter the model's managers."""
-
- serialization_expand_args = ['managers']
-
- def __init__(self, name, managers):
- self.managers = managers
- super().__init__(name)
-
- def deconstruct(self):
- return (
- self.__class__.__qualname__,
- [self.name, self.managers],
- {}
- )
-
- def state_forwards(self, app_label, state):
- model_state = state.models[app_label, self.name_lower]
- model_state.managers = list(self.managers)
- state.reload_model(app_label, self.name_lower, delay=True)
-
- def database_forwards(self, app_label, schema_editor, from_state, to_state):
- pass
-
- def database_backwards(self, app_label, schema_editor, from_state, to_state):
- pass
-
- def describe(self):
- return "Change managers on %s" % (self.name, )
-
-
-class IndexOperation(Operation):
- option_name = 'indexes'
-
- @cached_property
- def model_name_lower(self):
- return self.model_name.lower()
-
-
-class AddIndex(IndexOperation):
- """Add an index on a model."""
-
- def __init__(self, model_name, index):
- self.model_name = model_name
- if not index.name:
- raise ValueError(
- "Indexes passed to AddIndex operations require a name "
- "argument. %r doesn't have one." % index
- )
- self.index = index
-
- def state_forwards(self, app_label, state):
- model_state = state.models[app_label, self.model_name_lower]
- indexes = list(model_state.options[self.option_name])
- indexes.append(self.index.clone())
- model_state.options[self.option_name] = indexes
- state.reload_model(app_label, self.model_name_lower, delay=True)
-
- def database_forwards(self, app_label, schema_editor, from_state, to_state):
- model = to_state.apps.get_model(app_label, self.model_name)
- if self.allow_migrate_model(schema_editor.connection.alias, model):
- schema_editor.add_index(model, self.index)
-
- def database_backwards(self, app_label, schema_editor, from_state, to_state):
- model = from_state.apps.get_model(app_label, self.model_name)
- if self.allow_migrate_model(schema_editor.connection.alias, model):
- schema_editor.remove_index(model, self.index)
-
- def deconstruct(self):
- kwargs = {
- 'model_name': self.model_name,
- 'index': self.index,
- }
- return (
- self.__class__.__qualname__,
- [],
- kwargs,
- )
-
- def describe(self):
- return 'Create index %s on field(s) %s of model %s' % (
- self.index.name,
- ', '.join(self.index.fields),
- self.model_name,
- )
-
-
-class RemoveIndex(IndexOperation):
- """Remove an index from a model."""
-
- def __init__(self, model_name, name):
- self.model_name = model_name
- self.name = name
-
- def state_forwards(self, app_label, state):
- model_state = state.models[app_label, self.model_name_lower]
- indexes = model_state.options[self.option_name]
- model_state.options[self.option_name] = [idx for idx in indexes if idx.name != self.name]
- state.reload_model(app_label, self.model_name_lower, delay=True)
-
- def database_forwards(self, app_label, schema_editor, from_state, to_state):
- model = from_state.apps.get_model(app_label, self.model_name)
- if self.allow_migrate_model(schema_editor.connection.alias, model):
- from_model_state = from_state.models[app_label, self.model_name_lower]
- index = from_model_state.get_index_by_name(self.name)
- schema_editor.remove_index(model, index)
-
- def database_backwards(self, app_label, schema_editor, from_state, to_state):
- model = to_state.apps.get_model(app_label, self.model_name)
- if self.allow_migrate_model(schema_editor.connection.alias, model):
- to_model_state = to_state.models[app_label, self.model_name_lower]
- index = to_model_state.get_index_by_name(self.name)
- schema_editor.add_index(model, index)
-
- def deconstruct(self):
- kwargs = {
- 'model_name': self.model_name,
- 'name': self.name,
- }
- return (
- self.__class__.__qualname__,
- [],
- kwargs,
- )
-
- def describe(self):
- return 'Remove index %s from %s' % (self.name, self.model_name)
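
For reference, AddIndex and RemoveIndex are normally written into a migration file along the lines of the sketch below; the app, model, field, and index names are hypothetical:

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("application", "0002_auto"),  # hypothetical predecessor
    ]

    operations = [
        migrations.AddIndex(
            model_name="post",
            index=models.Index(fields=["published_date"], name="post_published_idx"),
        ),
        migrations.RemoveIndex(
            model_name="post",
            name="post_title_idx",
        ),
    ]
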
diff --git a/thesisenv/lib/python3.6/site-packages/django/db/migrations/operations/special.py b/thesisenv/lib/python3.6/site-packages/django/db/migrations/operations/special.py
deleted file mode 100644
index 5a8510e..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/db/migrations/operations/special.py
+++ /dev/null
@@ -1,203 +0,0 @@
-from django.db import router
-
-from .base import Operation
-
-
-class SeparateDatabaseAndState(Operation):
- """
- Take two lists of operations - ones that will be used for the database,
- and ones that will be used for the state change. This allows operations
- that don't support state changes to still have them applied, or allows
- operations that affect only the state and not the database, and so on.
- """
-
- serialization_expand_args = ['database_operations', 'state_operations']
-
- def __init__(self, database_operations=None, state_operations=None):
- self.database_operations = database_operations or []
- self.state_operations = state_operations or []
-
- def deconstruct(self):
- kwargs = {}
- if self.database_operations:
- kwargs['database_operations'] = self.database_operations
- if self.state_operations:
- kwargs['state_operations'] = self.state_operations
- return (
- self.__class__.__qualname__,
- [],
- kwargs
- )
-
- def state_forwards(self, app_label, state):
- for state_operation in self.state_operations:
- state_operation.state_forwards(app_label, state)
-
- def database_forwards(self, app_label, schema_editor, from_state, to_state):
- # We calculate state separately in here since our state functions aren't useful
- for database_operation in self.database_operations:
- to_state = from_state.clone()
- database_operation.state_forwards(app_label, to_state)
- database_operation.database_forwards(app_label, schema_editor, from_state, to_state)
- from_state = to_state
-
- def database_backwards(self, app_label, schema_editor, from_state, to_state):
- # We calculate state separately in here since our state functions aren't useful
- to_states = {}
- for dbop in self.database_operations:
- to_states[dbop] = to_state
- to_state = to_state.clone()
- dbop.state_forwards(app_label, to_state)
- # to_state now has the states of all the database_operations applied
- # which is the from_state for the backwards migration of the last
- # operation.
- for database_operation in reversed(self.database_operations):
- from_state = to_state
- to_state = to_states[database_operation]
- database_operation.database_backwards(app_label, schema_editor, from_state, to_state)
-
- def describe(self):
- return "Custom state/database change combination"
-
-
-class RunSQL(Operation):
- """
- Run some raw SQL. A reverse SQL statement may be provided.
-
- Also accept a list of operations that represent the state change effected
- by this SQL change, in case it's custom column/table creation/deletion.
- """
- noop = ''
-
- def __init__(self, sql, reverse_sql=None, state_operations=None, hints=None, elidable=False):
- self.sql = sql
- self.reverse_sql = reverse_sql
- self.state_operations = state_operations or []
- self.hints = hints or {}
- self.elidable = elidable
-
- def deconstruct(self):
- kwargs = {
- 'sql': self.sql,
- }
- if self.reverse_sql is not None:
- kwargs['reverse_sql'] = self.reverse_sql
- if self.state_operations:
- kwargs['state_operations'] = self.state_operations
- if self.hints:
- kwargs['hints'] = self.hints
- return (
- self.__class__.__qualname__,
- [],
- kwargs
- )
-
- @property
- def reversible(self):
- return self.reverse_sql is not None
-
- def state_forwards(self, app_label, state):
- for state_operation in self.state_operations:
- state_operation.state_forwards(app_label, state)
-
- def database_forwards(self, app_label, schema_editor, from_state, to_state):
- if router.allow_migrate(schema_editor.connection.alias, app_label, **self.hints):
- self._run_sql(schema_editor, self.sql)
-
- def database_backwards(self, app_label, schema_editor, from_state, to_state):
- if self.reverse_sql is None:
- raise NotImplementedError("You cannot reverse this operation")
- if router.allow_migrate(schema_editor.connection.alias, app_label, **self.hints):
- self._run_sql(schema_editor, self.reverse_sql)
-
- def describe(self):
- return "Raw SQL operation"
-
- def _run_sql(self, schema_editor, sqls):
- if isinstance(sqls, (list, tuple)):
- for sql in sqls:
- params = None
- if isinstance(sql, (list, tuple)):
- elements = len(sql)
- if elements == 2:
- sql, params = sql
- else:
- raise ValueError("Expected a 2-tuple but got %d" % elements)
- schema_editor.execute(sql, params=params)
- elif sqls != RunSQL.noop:
- statements = schema_editor.connection.ops.prepare_sql_script(sqls)
- for statement in statements:
- schema_editor.execute(statement, params=None)
-
-
-class RunPython(Operation):
- """
- Run Python code in a context suitable for doing versioned ORM operations.
- """
-
- reduces_to_sql = False
-
- def __init__(self, code, reverse_code=None, atomic=None, hints=None, elidable=False):
- self.atomic = atomic
- # Forwards code
- if not callable(code):
- raise ValueError("RunPython must be supplied with a callable")
- self.code = code
- # Reverse code
- if reverse_code is None:
- self.reverse_code = None
- else:
- if not callable(reverse_code):
- raise ValueError("RunPython must be supplied with callable arguments")
- self.reverse_code = reverse_code
- self.hints = hints or {}
- self.elidable = elidable
-
- def deconstruct(self):
- kwargs = {
- 'code': self.code,
- }
- if self.reverse_code is not None:
- kwargs['reverse_code'] = self.reverse_code
- if self.atomic is not None:
- kwargs['atomic'] = self.atomic
- if self.hints:
- kwargs['hints'] = self.hints
- return (
- self.__class__.__qualname__,
- [],
- kwargs
- )
-
- @property
- def reversible(self):
- return self.reverse_code is not None
-
- def state_forwards(self, app_label, state):
- # RunPython objects have no state effect. To add some, combine this
- # with SeparateDatabaseAndState.
- pass
-
- def database_forwards(self, app_label, schema_editor, from_state, to_state):
- # RunPython has access to all models. Ensure that all models are
- # reloaded in case any are delayed.
- from_state.clear_delayed_apps_cache()
- if router.allow_migrate(schema_editor.connection.alias, app_label, **self.hints):
- # We now execute the Python code in a context that contains a 'models'
- # object, representing the versioned models as an app registry.
- # We could try to override the global cache, but then people will still
- # use direct imports, so we go with a documentation approach instead.
- self.code(from_state.apps, schema_editor)
-
- def database_backwards(self, app_label, schema_editor, from_state, to_state):
- if self.reverse_code is None:
- raise NotImplementedError("You cannot reverse this operation")
- if router.allow_migrate(schema_editor.connection.alias, app_label, **self.hints):
- self.reverse_code(from_state.apps, schema_editor)
-
- def describe(self):
- return "Raw Python operation"
-
- @staticmethod
- def noop(apps, schema_editor):
- return None
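
RunPython above is the hook for data migrations. A sketch of typical usage with a hypothetical app label, model, and field; the reverse step is RunPython.noop so the migration stays reversible:

from django.db import migrations


def set_default_titles(apps, schema_editor):
    # Use the historical model from the versioned app registry, as the
    # RunPython docstring above recommends, not a direct model import.
    Post = apps.get_model("application", "Post")
    Post.objects.filter(title="").update(title="(untitled)")


class Migration(migrations.Migration):

    dependencies = [
        ("application", "0003_auto"),  # hypothetical predecessor
    ]

    operations = [
        migrations.RunPython(set_default_titles, migrations.RunPython.noop),
    ]
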
diff --git a/thesisenv/lib/python3.6/site-packages/django/db/migrations/operations/utils.py b/thesisenv/lib/python3.6/site-packages/django/db/migrations/operations/utils.py
deleted file mode 100644
index af23ea9..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/db/migrations/operations/utils.py
+++ /dev/null
@@ -1,9 +0,0 @@
-def is_referenced_by_foreign_key(state, model_name_lower, field, field_name):
- for state_app_label, state_model in state.models:
- for _, f in state.models[state_app_label, state_model].fields:
- if (f.related_model and
- '%s.%s' % (state_app_label, model_name_lower) == f.related_model.lower() and
- hasattr(f, 'to_fields')):
- if (f.to_fields[0] is None and field.primary_key) or field_name in f.to_fields:
- return True
- return False
diff --git a/thesisenv/lib/python3.6/site-packages/django/db/migrations/optimizer.py b/thesisenv/lib/python3.6/site-packages/django/db/migrations/optimizer.py
deleted file mode 100644
index d31ab89..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/db/migrations/optimizer.py
+++ /dev/null
@@ -1,61 +0,0 @@
-class MigrationOptimizer:
- """
- Power the optimization process, where you provide a list of Operations
- and you are returned a list of equal or shorter length - operations
- are merged into one if possible.
-
- For example, a CreateModel and an AddField can be optimized into a
- new CreateModel, and CreateModel and DeleteModel can be optimized into
- nothing.
- """
-
- def optimize(self, operations, app_label=None):
- """
- Main optimization entry point. Pass in a list of Operation instances,
- get out a new list of Operation instances.
-
- Unfortunately, due to the scope of the optimization (two combinable
- operations might be separated by several hundred others), this can't be
- done as a peephole optimization with checks/output implemented on
- the Operations themselves; instead, the optimizer looks at each
- individual operation and scans forwards in the list to see if there
- are any matches, stopping at boundaries - operations which can't
- be optimized over (RunSQL, operations on the same field/model, etc.)
-
- The inner loop is run until the starting list is the same as the result
- list, and then the result is returned. This means that operation
- optimization must be stable and always return an equal or shorter list.
-
- The app_label argument is optional, but if you pass it you'll get more
- efficient optimization.
- """
- # Internal tracking variable for test assertions about # of loops
- self._iterations = 0
- while True:
- result = self.optimize_inner(operations, app_label)
- self._iterations += 1
- if result == operations:
- return result
- operations = result
-
- def optimize_inner(self, operations, app_label=None):
- """Inner optimization loop."""
- new_operations = []
- for i, operation in enumerate(operations):
- # Compare it to each operation after it
- for j, other in enumerate(operations[i + 1:]):
- in_between = operations[i + 1:i + j + 1]
- result = operation.reduce(other, in_between, app_label)
- if isinstance(result, list):
- # Optimize! Add result, then remaining others, then return
- new_operations.extend(result)
- new_operations.extend(in_between)
- new_operations.extend(operations[i + j + 2:])
- return new_operations
- if not result:
- # We can't optimize across `other`.
- new_operations.append(operation)
- break
- else:
- new_operations.append(operation)
- return new_operations
diff --git a/thesisenv/lib/python3.6/site-packages/django/db/migrations/questioner.py b/thesisenv/lib/python3.6/site-packages/django/db/migrations/questioner.py
deleted file mode 100644
index 9b5b9f3..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/db/migrations/questioner.py
+++ /dev/null
@@ -1,237 +0,0 @@
-import importlib
-import os
-import sys
-
-from django.apps import apps
-from django.db.models.fields import NOT_PROVIDED
-from django.utils import datetime_safe, timezone
-
-from .loader import MigrationLoader
-
-
-class MigrationQuestioner:
- """
- Give the autodetector responses to questions it might have.
- This base class has a built-in noninteractive mode, but the
- interactive subclass is what the command-line arguments will use.
- """
-
- def __init__(self, defaults=None, specified_apps=None, dry_run=None):
- self.defaults = defaults or {}
- self.specified_apps = specified_apps or set()
- self.dry_run = dry_run
-
- def ask_initial(self, app_label):
- """Should we create an initial migration for the app?"""
- # If it was specified on the command line, definitely true
- if app_label in self.specified_apps:
- return True
- # Otherwise, we look to see if it has a migrations module
- # without any Python files in it, apart from __init__.py.
- # Apps from the new app template will have these; the python
- # file check will ensure we skip South ones.
- try:
- app_config = apps.get_app_config(app_label)
- except LookupError: # It's a fake app.
- return self.defaults.get("ask_initial", False)
- migrations_import_path, _ = MigrationLoader.migrations_module(app_config.label)
- if migrations_import_path is None:
- # It's an application with migrations disabled.
- return self.defaults.get("ask_initial", False)
- try:
- migrations_module = importlib.import_module(migrations_import_path)
- except ImportError:
- return self.defaults.get("ask_initial", False)
- else:
- if hasattr(migrations_module, "__file__"):
- filenames = os.listdir(os.path.dirname(migrations_module.__file__))
- elif hasattr(migrations_module, "__path__"):
- if len(migrations_module.__path__) > 1:
- return False
- filenames = os.listdir(list(migrations_module.__path__)[0])
- return not any(x.endswith(".py") for x in filenames if x != "__init__.py")
-
- def ask_not_null_addition(self, field_name, model_name):
- """Adding a NOT NULL field to a model."""
- # None means quit
- return None
-
- def ask_not_null_alteration(self, field_name, model_name):
- """Changing a NULL field to NOT NULL."""
- # None means quit
- return None
-
- def ask_rename(self, model_name, old_name, new_name, field_instance):
- """Was this field really renamed?"""
- return self.defaults.get("ask_rename", False)
-
- def ask_rename_model(self, old_model_state, new_model_state):
- """Was this model really renamed?"""
- return self.defaults.get("ask_rename_model", False)
-
- def ask_merge(self, app_label):
- """Do you really want to merge these migrations?"""
- return self.defaults.get("ask_merge", False)
-
- def ask_auto_now_add_addition(self, field_name, model_name):
- """Adding an auto_now_add field to a model."""
- # None means quit
- return None
-
-
-class InteractiveMigrationQuestioner(MigrationQuestioner):
-
- def _boolean_input(self, question, default=None):
- result = input("%s " % question)
- if not result and default is not None:
- return default
- while len(result) < 1 or result[0].lower() not in "yn":
- result = input("Please answer yes or no: ")
- return result[0].lower() == "y"
-
- def _choice_input(self, question, choices):
- print(question)
- for i, choice in enumerate(choices):
- print(" %s) %s" % (i + 1, choice))
- result = input("Select an option: ")
- while True:
- try:
- value = int(result)
- except ValueError:
- pass
- else:
- if 0 < value <= len(choices):
- return value
- result = input("Please select a valid option: ")
-
- def _ask_default(self, default=''):
- """
- Prompt for a default value.
-
- The ``default`` argument allows providing a custom default value (as a
- string) which will be shown to the user and used as the return value
- if the user doesn't provide any other input.
- """
- print("Please enter the default value now, as valid Python")
- if default:
- print(
- "You can accept the default '{}' by pressing 'Enter' or you "
- "can provide another value.".format(default)
- )
- print("The datetime and django.utils.timezone modules are available, so you can do e.g. timezone.now")
- print("Type 'exit' to exit this prompt")
- while True:
- if default:
- prompt = "[default: {}] >>> ".format(default)
- else:
- prompt = ">>> "
- code = input(prompt)
- if not code and default:
- code = default
- if not code:
- print("Please enter some code, or 'exit' (with no quotes) to exit.")
- elif code == "exit":
- sys.exit(1)
- else:
- try:
- return eval(code, {}, {"datetime": datetime_safe, "timezone": timezone})
- except (SyntaxError, NameError) as e:
- print("Invalid input: %s" % e)
-
- def ask_not_null_addition(self, field_name, model_name):
- """Adding a NOT NULL field to a model."""
- if not self.dry_run:
- choice = self._choice_input(
- "You are trying to add a non-nullable field '%s' to %s without a default; "
- "we can't do that (the database needs something to populate existing rows).\n"
- "Please select a fix:" % (field_name, model_name),
- [
- ("Provide a one-off default now (will be set on all existing "
- "rows with a null value for this column)"),
- "Quit, and let me add a default in models.py",
- ]
- )
- if choice == 2:
- sys.exit(3)
- else:
- return self._ask_default()
- return None
-
- def ask_not_null_alteration(self, field_name, model_name):
- """Changing a NULL field to NOT NULL."""
- if not self.dry_run:
- choice = self._choice_input(
- "You are trying to change the nullable field '%s' on %s to non-nullable "
- "without a default; we can't do that (the database needs something to "
- "populate existing rows).\n"
- "Please select a fix:" % (field_name, model_name),
- [
- ("Provide a one-off default now (will be set on all existing "
- "rows with a null value for this column)"),
- ("Ignore for now, and let me handle existing rows with NULL myself "
- "(e.g. because you added a RunPython or RunSQL operation to handle "
- "NULL values in a previous data migration)"),
- "Quit, and let me add a default in models.py",
- ]
- )
- if choice == 2:
- return NOT_PROVIDED
- elif choice == 3:
- sys.exit(3)
- else:
- return self._ask_default()
- return None
-
- def ask_rename(self, model_name, old_name, new_name, field_instance):
- """Was this field really renamed?"""
- msg = "Did you rename %s.%s to %s.%s (a %s)? [y/N]"
- return self._boolean_input(msg % (model_name, old_name, model_name, new_name,
- field_instance.__class__.__name__), False)
-
- def ask_rename_model(self, old_model_state, new_model_state):
- """Was this model really renamed?"""
- msg = "Did you rename the %s.%s model to %s? [y/N]"
- return self._boolean_input(msg % (old_model_state.app_label, old_model_state.name,
- new_model_state.name), False)
-
- def ask_merge(self, app_label):
- return self._boolean_input(
- "\nMerging will only work if the operations printed above do not conflict\n" +
- "with each other (working on different fields or models)\n" +
- "Do you want to merge these migration branches? [y/N]",
- False,
- )
-
- def ask_auto_now_add_addition(self, field_name, model_name):
- """Adding an auto_now_add field to a model."""
- if not self.dry_run:
- choice = self._choice_input(
- "You are trying to add the field '{}' with 'auto_now_add=True' "
- "to {} without a default; the database needs something to "
- "populate existing rows.\n".format(field_name, model_name),
- [
- "Provide a one-off default now (will be set on all "
- "existing rows)",
- "Quit, and let me add a default in models.py",
- ]
- )
- if choice == 2:
- sys.exit(3)
- else:
- return self._ask_default(default='timezone.now')
- return None
-
-
-class NonInteractiveMigrationQuestioner(MigrationQuestioner):
-
- def ask_not_null_addition(self, field_name, model_name):
- # We can't ask the user, so act like the user aborted.
- sys.exit(3)
-
- def ask_not_null_alteration(self, field_name, model_name):
- # We can't ask the user, so set as not provided.
- return NOT_PROVIDED
-
- def ask_auto_now_add_addition(self, field_name, model_name):
- # We can't ask the user, so act like the user aborted.
- sys.exit(3)
diff --git a/thesisenv/lib/python3.6/site-packages/django/db/migrations/recorder.py b/thesisenv/lib/python3.6/site-packages/django/db/migrations/recorder.py
deleted file mode 100644
index 3a972fe..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/db/migrations/recorder.py
+++ /dev/null
@@ -1,80 +0,0 @@
-from django.apps.registry import Apps
-from django.db import models
-from django.db.utils import DatabaseError
-from django.utils.timezone import now
-
-from .exceptions import MigrationSchemaMissing
-
-
-class MigrationRecorder:
- """
- Deal with storing migration records in the database.
-
- Because this table is actually itself used for dealing with model
- creation, it's the one thing we can't do normally via migrations.
- We manually handle table creation/schema updating (using schema backend)
- and then have a floating model to do queries with.
-
- If a migration is unapplied its row is removed from the table. Having
- a row in the table always means a migration is applied.
- """
-
- class Migration(models.Model):
- app = models.CharField(max_length=255)
- name = models.CharField(max_length=255)
- applied = models.DateTimeField(default=now)
-
- class Meta:
- apps = Apps()
- app_label = "migrations"
- db_table = "django_migrations"
-
- def __str__(self):
- return "Migration %s for %s" % (self.name, self.app)
-
- def __init__(self, connection):
- self.connection = connection
-
- @property
- def migration_qs(self):
- return self.Migration.objects.using(self.connection.alias)
-
- def has_table(self):
- """Return True if the django_migrations table exists."""
- return self.Migration._meta.db_table in self.connection.introspection.table_names(self.connection.cursor())
-
- def ensure_schema(self):
- """Ensure the table exists and has the correct schema."""
- # If the table's there, that's fine - we've never changed its schema
- # in the codebase.
- if self.has_table():
- return
- # Make the table
- try:
- with self.connection.schema_editor() as editor:
- editor.create_model(self.Migration)
- except DatabaseError as exc:
- raise MigrationSchemaMissing("Unable to create the django_migrations table (%s)" % exc)
-
- def applied_migrations(self):
- """Return a set of (app, name) of applied migrations."""
- if self.has_table():
- return {tuple(x) for x in self.migration_qs.values_list('app', 'name')}
- else:
- # If the django_migrations table doesn't exist, then no migrations
- # are applied.
- return set()
-
- def record_applied(self, app, name):
- """Record that a migration was applied."""
- self.ensure_schema()
- self.migration_qs.create(app=app, name=name)
-
- def record_unapplied(self, app, name):
- """Record that a migration was unapplied."""
- self.ensure_schema()
- self.migration_qs.filter(app=app, name=name).delete()
-
- def flush(self):
- """Delete all migration records. Useful for testing migrations."""
- self.migration_qs.all().delete()
diff --git a/thesisenv/lib/python3.6/site-packages/django/db/migrations/serializer.py b/thesisenv/lib/python3.6/site-packages/django/db/migrations/serializer.py
deleted file mode 100644
index 4df3776..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/db/migrations/serializer.py
+++ /dev/null
@@ -1,371 +0,0 @@
-import builtins
-import collections
-import datetime
-import decimal
-import enum
-import functools
-import math
-import re
-import types
-import uuid
-
-from django.db import models
-from django.db.migrations.operations.base import Operation
-from django.db.migrations.utils import COMPILED_REGEX_TYPE, RegexObject
-from django.utils import datetime_safe
-from django.utils.functional import LazyObject, Promise
-from django.utils.timezone import utc
-from django.utils.version import get_docs_version
-
-
-class BaseSerializer:
- def __init__(self, value):
- self.value = value
-
- def serialize(self):
- raise NotImplementedError('Subclasses of BaseSerializer must implement the serialize() method.')
-
-
-class BaseSequenceSerializer(BaseSerializer):
- def _format(self):
- raise NotImplementedError('Subclasses of BaseSequenceSerializer must implement the _format() method.')
-
- def serialize(self):
- imports = set()
- strings = []
- for item in self.value:
- item_string, item_imports = serializer_factory(item).serialize()
- imports.update(item_imports)
- strings.append(item_string)
- value = self._format()
- return value % (", ".join(strings)), imports
-
-
-class BaseSimpleSerializer(BaseSerializer):
- def serialize(self):
- return repr(self.value), set()
-
-
-class ByteTypeSerializer(BaseSerializer):
- def serialize(self):
- return repr(self.value), set()
-
-
-class DatetimeSerializer(BaseSerializer):
- def serialize(self):
- if self.value.tzinfo is not None and self.value.tzinfo != utc:
- self.value = self.value.astimezone(utc)
- value_repr = repr(self.value).replace("", "utc")
- if isinstance(self.value, datetime_safe.datetime):
- value_repr = "datetime.%s" % value_repr
- imports = ["import datetime"]
- if self.value.tzinfo is not None:
- imports.append("from django.utils.timezone import utc")
- return value_repr, set(imports)
-
-
-class DateSerializer(BaseSerializer):
- def serialize(self):
- value_repr = repr(self.value)
- if isinstance(self.value, datetime_safe.date):
- value_repr = "datetime.%s" % value_repr
- return value_repr, {"import datetime"}
-
-
-class DecimalSerializer(BaseSerializer):
- def serialize(self):
- return repr(self.value), {"from decimal import Decimal"}
-
-
-class DeconstructableSerializer(BaseSerializer):
- @staticmethod
- def serialize_deconstructed(path, args, kwargs):
- name, imports = DeconstructableSerializer._serialize_path(path)
- strings = []
- for arg in args:
- arg_string, arg_imports = serializer_factory(arg).serialize()
- strings.append(arg_string)
- imports.update(arg_imports)
- for kw, arg in sorted(kwargs.items()):
- arg_string, arg_imports = serializer_factory(arg).serialize()
- imports.update(arg_imports)
- strings.append("%s=%s" % (kw, arg_string))
- return "%s(%s)" % (name, ", ".join(strings)), imports
-
- @staticmethod
- def _serialize_path(path):
- module, name = path.rsplit(".", 1)
- if module == "django.db.models":
- imports = {"from django.db import models"}
- name = "models.%s" % name
- else:
- imports = {"import %s" % module}
- name = path
- return name, imports
-
- def serialize(self):
- return self.serialize_deconstructed(*self.value.deconstruct())
-
-
-class DictionarySerializer(BaseSerializer):
- def serialize(self):
- imports = set()
- strings = []
- for k, v in sorted(self.value.items()):
- k_string, k_imports = serializer_factory(k).serialize()
- v_string, v_imports = serializer_factory(v).serialize()
- imports.update(k_imports)
- imports.update(v_imports)
- strings.append((k_string, v_string))
- return "{%s}" % (", ".join("%s: %s" % (k, v) for k, v in strings)), imports
-
-
-class EnumSerializer(BaseSerializer):
- def serialize(self):
- enum_class = self.value.__class__
- module = enum_class.__module__
- imports = {"import %s" % module}
- v_string, v_imports = serializer_factory(self.value.value).serialize()
- imports.update(v_imports)
- return "%s.%s(%s)" % (module, enum_class.__name__, v_string), imports
-
-
-class FloatSerializer(BaseSimpleSerializer):
- def serialize(self):
- if math.isnan(self.value) or math.isinf(self.value):
- return 'float("{}")'.format(self.value), set()
- return super().serialize()
-
-
-class FrozensetSerializer(BaseSequenceSerializer):
- def _format(self):
- return "frozenset([%s])"
-
-
-class FunctionTypeSerializer(BaseSerializer):
- def serialize(self):
- if getattr(self.value, "__self__", None) and isinstance(self.value.__self__, type):
- klass = self.value.__self__
- module = klass.__module__
- return "%s.%s.%s" % (module, klass.__name__, self.value.__name__), {"import %s" % module}
- # Further error checking
- if self.value.__name__ == '<lambda>':
- raise ValueError("Cannot serialize function: lambda")
- if self.value.__module__ is None:
- raise ValueError("Cannot serialize function %r: No module" % self.value)
-
- module_name = self.value.__module__
-
- if '<' not in self.value.__qualname__: # Qualname can include <locals>
- return '%s.%s' % (module_name, self.value.__qualname__), {'import %s' % self.value.__module__}
-
- raise ValueError(
- 'Could not find function %s in %s.\n' % (self.value.__name__, module_name)
- )
-
-
-class FunctoolsPartialSerializer(BaseSerializer):
- def serialize(self):
- imports = {'import functools'}
- # Serialize functools.partial() arguments
- func_string, func_imports = serializer_factory(self.value.func).serialize()
- args_string, args_imports = serializer_factory(self.value.args).serialize()
- keywords_string, keywords_imports = serializer_factory(self.value.keywords).serialize()
- # Add any imports needed by arguments
- imports.update(func_imports)
- imports.update(args_imports)
- imports.update(keywords_imports)
- return (
- "functools.partial(%s, *%s, **%s)" % (
- func_string, args_string, keywords_string,
- ),
- imports,
- )
-
-
-class IterableSerializer(BaseSerializer):
- def serialize(self):
- imports = set()
- strings = []
- for item in self.value:
- item_string, item_imports = serializer_factory(item).serialize()
- imports.update(item_imports)
- strings.append(item_string)
- # When len(strings)==0, the empty iterable should be serialized as
- # "()", not "(,)" because (,) is invalid Python syntax.
- value = "(%s)" if len(strings) != 1 else "(%s,)"
- return value % (", ".join(strings)), imports
-
-
-class ModelFieldSerializer(DeconstructableSerializer):
- def serialize(self):
- attr_name, path, args, kwargs = self.value.deconstruct()
- return self.serialize_deconstructed(path, args, kwargs)
-
-
-class ModelManagerSerializer(DeconstructableSerializer):
- def serialize(self):
- as_manager, manager_path, qs_path, args, kwargs = self.value.deconstruct()
- if as_manager:
- name, imports = self._serialize_path(qs_path)
- return "%s.as_manager()" % name, imports
- else:
- return self.serialize_deconstructed(manager_path, args, kwargs)
-
-
-class OperationSerializer(BaseSerializer):
- def serialize(self):
- from django.db.migrations.writer import OperationWriter
- string, imports = OperationWriter(self.value, indentation=0).serialize()
- # Nested operation, trailing comma is handled in upper OperationWriter._write()
- return string.rstrip(','), imports
-
-
-class RegexSerializer(BaseSerializer):
- def serialize(self):
- imports = {"import re"}
- regex_pattern, pattern_imports = serializer_factory(self.value.pattern).serialize()
- # Turn off default implicit flags (e.g. re.U) because regexes with the
- # same implicit and explicit flags aren't equal.
- flags = self.value.flags ^ re.compile('').flags
- regex_flags, flag_imports = serializer_factory(flags).serialize()
- imports.update(pattern_imports)
- imports.update(flag_imports)
- args = [regex_pattern]
- if flags:
- args.append(regex_flags)
- return "re.compile(%s)" % ', '.join(args), imports
-
-
-class SequenceSerializer(BaseSequenceSerializer):
- def _format(self):
- return "[%s]"
-
-
-class SetSerializer(BaseSequenceSerializer):
- def _format(self):
- # Serialize as a set literal except when value is empty because {}
- # is an empty dict.
- return '{%s}' if self.value else 'set(%s)'
-
-
-class SettingsReferenceSerializer(BaseSerializer):
- def serialize(self):
- return "settings.%s" % self.value.setting_name, {"from django.conf import settings"}
-
-
-class TextTypeSerializer(BaseSerializer):
- def serialize(self):
- return repr(self.value), set()
-
-
-class TimedeltaSerializer(BaseSerializer):
- def serialize(self):
- return repr(self.value), {"import datetime"}
-
-
-class TimeSerializer(BaseSerializer):
- def serialize(self):
- value_repr = repr(self.value)
- if isinstance(self.value, datetime_safe.time):
- value_repr = "datetime.%s" % value_repr
- return value_repr, {"import datetime"}
-
-
-class TupleSerializer(BaseSequenceSerializer):
- def _format(self):
- # When len(value)==0, the empty tuple should be serialized as "()",
- # not "(,)" because (,) is invalid Python syntax.
- return "(%s)" if len(self.value) != 1 else "(%s,)"
-
-
-class TypeSerializer(BaseSerializer):
- def serialize(self):
- special_cases = [
- (models.Model, "models.Model", []),
- ]
- for case, string, imports in special_cases:
- if case is self.value:
- return string, set(imports)
- if hasattr(self.value, "__module__"):
- module = self.value.__module__
- if module == builtins.__name__:
- return self.value.__name__, set()
- else:
- return "%s.%s" % (module, self.value.__name__), {"import %s" % module}
-
-
-class UUIDSerializer(BaseSerializer):
- def serialize(self):
- return "uuid.%s" % repr(self.value), {"import uuid"}
-
-
-def serializer_factory(value):
- from django.db.migrations.writer import SettingsReference
- if isinstance(value, Promise):
- value = str(value)
- elif isinstance(value, LazyObject):
- # The unwrapped value is returned as the first item of the arguments
- # tuple.
- value = value.__reduce__()[1][0]
-
- if isinstance(value, models.Field):
- return ModelFieldSerializer(value)
- if isinstance(value, models.manager.BaseManager):
- return ModelManagerSerializer(value)
- if isinstance(value, Operation):
- return OperationSerializer(value)
- if isinstance(value, type):
- return TypeSerializer(value)
- # Anything that knows how to deconstruct itself.
- if hasattr(value, 'deconstruct'):
- return DeconstructableSerializer(value)
-
- # Unfortunately some of these are order-dependent.
- if isinstance(value, frozenset):
- return FrozensetSerializer(value)
- if isinstance(value, list):
- return SequenceSerializer(value)
- if isinstance(value, set):
- return SetSerializer(value)
- if isinstance(value, tuple):
- return TupleSerializer(value)
- if isinstance(value, dict):
- return DictionarySerializer(value)
- if isinstance(value, enum.Enum):
- return EnumSerializer(value)
- if isinstance(value, datetime.datetime):
- return DatetimeSerializer(value)
- if isinstance(value, datetime.date):
- return DateSerializer(value)
- if isinstance(value, datetime.time):
- return TimeSerializer(value)
- if isinstance(value, datetime.timedelta):
- return TimedeltaSerializer(value)
- if isinstance(value, SettingsReference):
- return SettingsReferenceSerializer(value)
- if isinstance(value, float):
- return FloatSerializer(value)
- if isinstance(value, (bool, int, type(None))):
- return BaseSimpleSerializer(value)
- if isinstance(value, bytes):
- return ByteTypeSerializer(value)
- if isinstance(value, str):
- return TextTypeSerializer(value)
- if isinstance(value, decimal.Decimal):
- return DecimalSerializer(value)
- if isinstance(value, functools.partial):
- return FunctoolsPartialSerializer(value)
- if isinstance(value, (types.FunctionType, types.BuiltinFunctionType, types.MethodType)):
- return FunctionTypeSerializer(value)
- if isinstance(value, collections.Iterable):
- return IterableSerializer(value)
- if isinstance(value, (COMPILED_REGEX_TYPE, RegexObject)):
- return RegexSerializer(value)
- if isinstance(value, uuid.UUID):
- return UUIDSerializer(value)
- raise ValueError(
- "Cannot serialize: %r\nThere are some values Django cannot serialize into "
- "migration files.\nFor more, see https://docs.djangoproject.com/en/%s/"
- "topics/migrations/#migration-serializing" % (value, get_docs_version())
- )
diff --git a/thesisenv/lib/python3.6/site-packages/django/db/migrations/state.py b/thesisenv/lib/python3.6/site-packages/django/db/migrations/state.py
deleted file mode 100644
index 4dfa3dd..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/db/migrations/state.py
+++ /dev/null
@@ -1,602 +0,0 @@
-import copy
-from collections import OrderedDict
-from contextlib import contextmanager
-
-from django.apps import AppConfig
-from django.apps.registry import Apps, apps as global_apps
-from django.conf import settings
-from django.db import models
-from django.db.models.fields.proxy import OrderWrt
-from django.db.models.fields.related import RECURSIVE_RELATIONSHIP_CONSTANT
-from django.db.models.options import DEFAULT_NAMES, normalize_together
-from django.db.models.utils import make_model_tuple
-from django.utils.functional import cached_property
-from django.utils.module_loading import import_string
-from django.utils.version import get_docs_version
-
-from .exceptions import InvalidBasesError
-
-
-def _get_app_label_and_model_name(model, app_label=''):
- if isinstance(model, str):
- split = model.split('.', 1)
- return tuple(split) if len(split) == 2 else (app_label, split[0])
- else:
- return model._meta.app_label, model._meta.model_name
-
-
-def _get_related_models(m):
- """Return all models that have a direct relationship to the given model."""
- related_models = [
- subclass for subclass in m.__subclasses__()
- if issubclass(subclass, models.Model)
- ]
- related_fields_models = set()
- for f in m._meta.get_fields(include_parents=True, include_hidden=True):
- if f.is_relation and f.related_model is not None and not isinstance(f.related_model, str):
- related_fields_models.add(f.model)
- related_models.append(f.related_model)
- # Reverse accessors of foreign keys to proxy models are attached to their
- # concrete proxied model.
- opts = m._meta
- if opts.proxy and m in related_fields_models:
- related_models.append(opts.concrete_model)
- return related_models
-
-
-def get_related_models_tuples(model):
- """
- Return a list of typical (app_label, model_name) tuples for all related
- models for the given model.
- """
- return {
- (rel_mod._meta.app_label, rel_mod._meta.model_name)
- for rel_mod in _get_related_models(model)
- }
-
-
-def get_related_models_recursive(model):
- """
- Return all models that have a direct or indirect relationship
- to the given model.
-
- Relationships are either defined by explicit relational fields, like
- ForeignKey, ManyToManyField or OneToOneField, or by inheriting from another
- model (a superclass is related to its subclasses, but not vice versa). Note,
- however, that a model inheriting from a concrete model is also related to
- its superclass through the implicit *_ptr OneToOneField on the subclass.
- """
- seen = set()
- queue = _get_related_models(model)
- for rel_mod in queue:
- rel_app_label, rel_model_name = rel_mod._meta.app_label, rel_mod._meta.model_name
- if (rel_app_label, rel_model_name) in seen:
- continue
- seen.add((rel_app_label, rel_model_name))
- queue.extend(_get_related_models(rel_mod))
- return seen - {(model._meta.app_label, model._meta.model_name)}
-
-
-class ProjectState:
- """
- Represent the entire project's overall state. This is the item that is
- passed around - do it here rather than at the app level so that cross-app
- FKs/etc. resolve properly.
- """
-
- def __init__(self, models=None, real_apps=None):
- self.models = models or {}
- # Apps to include from main registry, usually unmigrated ones
- self.real_apps = real_apps or []
- self.is_delayed = False
-
- def add_model(self, model_state):
- app_label, model_name = model_state.app_label, model_state.name_lower
- self.models[(app_label, model_name)] = model_state
- if 'apps' in self.__dict__: # hasattr would cache the property
- self.reload_model(app_label, model_name)
-
- def remove_model(self, app_label, model_name):
- del self.models[app_label, model_name]
- if 'apps' in self.__dict__: # hasattr would cache the property
- self.apps.unregister_model(app_label, model_name)
- # Need to do this explicitly since unregister_model() doesn't clear
- # the cache automatically (#24513)
- self.apps.clear_cache()
-
- def _find_reload_model(self, app_label, model_name, delay=False):
- if delay:
- self.is_delayed = True
-
- related_models = set()
-
- try:
- old_model = self.apps.get_model(app_label, model_name)
- except LookupError:
- pass
- else:
- # Get all relations to and from the old model before reloading,
- # as _meta.apps may change
- if delay:
- related_models = get_related_models_tuples(old_model)
- else:
- related_models = get_related_models_recursive(old_model)
-
- # Get all outgoing references from the model to be rendered
- model_state = self.models[(app_label, model_name)]
- # Directly related models are the models pointed to by ForeignKeys,
- # OneToOneFields, and ManyToManyFields.
- direct_related_models = set()
- for name, field in model_state.fields:
- if field.is_relation:
- if field.remote_field.model == RECURSIVE_RELATIONSHIP_CONSTANT:
- continue
- rel_app_label, rel_model_name = _get_app_label_and_model_name(field.related_model, app_label)
- direct_related_models.add((rel_app_label, rel_model_name.lower()))
-
- # For all direct related models recursively get all related models.
- related_models.update(direct_related_models)
- for rel_app_label, rel_model_name in direct_related_models:
- try:
- rel_model = self.apps.get_model(rel_app_label, rel_model_name)
- except LookupError:
- pass
- else:
- if delay:
- related_models.update(get_related_models_tuples(rel_model))
- else:
- related_models.update(get_related_models_recursive(rel_model))
-
- # Include the model itself
- related_models.add((app_label, model_name))
-
- return related_models
-
- def reload_model(self, app_label, model_name, delay=False):
- if 'apps' in self.__dict__: # hasattr would cache the property
- related_models = self._find_reload_model(app_label, model_name, delay)
- self._reload(related_models)
-
- def reload_models(self, models, delay=True):
- if 'apps' in self.__dict__: # hasattr would cache the property
- related_models = set()
- for app_label, model_name in models:
- related_models.update(self._find_reload_model(app_label, model_name, delay))
- self._reload(related_models)
-
- def _reload(self, related_models):
- # Unregister all related models
- with self.apps.bulk_update():
- for rel_app_label, rel_model_name in related_models:
- self.apps.unregister_model(rel_app_label, rel_model_name)
-
- states_to_be_rendered = []
- # Gather all models states of those models that will be rerendered.
- # This includes:
- # 1. All related models of unmigrated apps
- for model_state in self.apps.real_models:
- if (model_state.app_label, model_state.name_lower) in related_models:
- states_to_be_rendered.append(model_state)
-
- # 2. All related models of migrated apps
- for rel_app_label, rel_model_name in related_models:
- try:
- model_state = self.models[rel_app_label, rel_model_name]
- except KeyError:
- pass
- else:
- states_to_be_rendered.append(model_state)
-
- # Render all models
- self.apps.render_multiple(states_to_be_rendered)
-
- def clone(self):
- """Return an exact copy of this ProjectState."""
- new_state = ProjectState(
- models={k: v.clone() for k, v in self.models.items()},
- real_apps=self.real_apps,
- )
- if 'apps' in self.__dict__:
- new_state.apps = self.apps.clone()
- new_state.is_delayed = self.is_delayed
- return new_state
-
- def clear_delayed_apps_cache(self):
- if self.is_delayed and 'apps' in self.__dict__:
- del self.__dict__['apps']
-
- @cached_property
- def apps(self):
- return StateApps(self.real_apps, self.models)
-
- @property
- def concrete_apps(self):
- self.apps = StateApps(self.real_apps, self.models, ignore_swappable=True)
- return self.apps
-
- @classmethod
- def from_apps(cls, apps):
- """Take an Apps and return a ProjectState matching it."""
- app_models = {}
- for model in apps.get_models(include_swapped=True):
- model_state = ModelState.from_model(model)
- app_models[(model_state.app_label, model_state.name_lower)] = model_state
- return cls(app_models)
-
- def __eq__(self, other):
- return self.models == other.models and set(self.real_apps) == set(other.real_apps)
-
-
-class AppConfigStub(AppConfig):
- """Stub of an AppConfig. Only provides a label and a dict of models."""
- # Not used, but required by AppConfig.__init__
- path = ''
-
- def __init__(self, label):
- self.label = label
- # App-label and app-name are not the same thing, so technically passing
- # in the label here is wrong. In practice, migrations don't care about
- # the app name, but we need something unique, and the label works fine.
- super().__init__(label, None)
-
- def import_models(self):
- self.models = self.apps.all_models[self.label]
-
-
-class StateApps(Apps):
- """
- Subclass of the global Apps registry class to better handle dynamic model
- additions and removals.
- """
- def __init__(self, real_apps, models, ignore_swappable=False):
- # Any apps in self.real_apps should have all their models included
- # in the render. We don't use the original model instances as there
- # are some variables that refer to the Apps object.
- # FKs/M2Ms from real apps are also not included as they just
- # mess things up with partial states (due to lack of dependencies)
- self.real_models = []
- for app_label in real_apps:
- app = global_apps.get_app_config(app_label)
- for model in app.get_models():
- self.real_models.append(ModelState.from_model(model, exclude_rels=True))
- # Populate the app registry with a stub for each application.
- app_labels = {model_state.app_label for model_state in models.values()}
- app_configs = [AppConfigStub(label) for label in sorted(real_apps + list(app_labels))]
- super().__init__(app_configs)
-
- # The lock gets in the way of copying as implemented in clone(), which
- # is called whenever Django duplicates a StateApps before updating it.
- self._lock = None
-
- self.render_multiple(list(models.values()) + self.real_models)
-
- # There shouldn't be any operations pending at this point.
- from django.core.checks.model_checks import _check_lazy_references
- ignore = {make_model_tuple(settings.AUTH_USER_MODEL)} if ignore_swappable else set()
- errors = _check_lazy_references(self, ignore=ignore)
- if errors:
- raise ValueError("\n".join(error.msg for error in errors))
-
- @contextmanager
- def bulk_update(self):
- # Avoid clearing each model's cache for each change. Instead, clear
- # all caches when we're finished updating the model instances.
- ready = self.ready
- self.ready = False
- try:
- yield
- finally:
- self.ready = ready
- self.clear_cache()
-
- def render_multiple(self, model_states):
- # We keep trying to render the models in a loop, ignoring invalid
- # base errors, until the size of the unrendered models doesn't
- # decrease by at least one, meaning there's a base dependency loop/
- # missing base.
- if not model_states:
- return
- # Prevent that all model caches are expired for each render.
- with self.bulk_update():
- unrendered_models = model_states
- while unrendered_models:
- new_unrendered_models = []
- for model in unrendered_models:
- try:
- model.render(self)
- except InvalidBasesError:
- new_unrendered_models.append(model)
- if len(new_unrendered_models) == len(unrendered_models):
- raise InvalidBasesError(
- "Cannot resolve bases for %r\nThis can happen if you are inheriting models from an "
- "app with migrations (e.g. contrib.auth)\n in an app with no migrations; see "
- "https://docs.djangoproject.com/en/%s/topics/migrations/#dependencies "
- "for more" % (new_unrendered_models, get_docs_version())
- )
- unrendered_models = new_unrendered_models
-
- def clone(self):
- """Return a clone of this registry."""
- clone = StateApps([], {})
- clone.all_models = copy.deepcopy(self.all_models)
- clone.app_configs = copy.deepcopy(self.app_configs)
- # Set the pointer to the correct app registry.
- for app_config in clone.app_configs.values():
- app_config.apps = clone
- # No need to actually clone them, they'll never change
- clone.real_models = self.real_models
- return clone
-
- def register_model(self, app_label, model):
- self.all_models[app_label][model._meta.model_name] = model
- if app_label not in self.app_configs:
- self.app_configs[app_label] = AppConfigStub(app_label)
- self.app_configs[app_label].apps = self
- self.app_configs[app_label].models = OrderedDict()
- self.app_configs[app_label].models[model._meta.model_name] = model
- self.do_pending_operations(model)
- self.clear_cache()
-
- def unregister_model(self, app_label, model_name):
- try:
- del self.all_models[app_label][model_name]
- del self.app_configs[app_label].models[model_name]
- except KeyError:
- pass
-
-
-class ModelState:
- """
- Represent a Django Model. Don't use the actual Model class as it's not
- designed to have its options changed - instead, mutate this one and then
- render it into a Model as required.
-
- Note that while you are allowed to mutate .fields, you are not allowed
- to mutate the Field instances inside there themselves - you must instead
- assign new ones, as these are not detached during a clone.
- """
-
- def __init__(self, app_label, name, fields, options=None, bases=None, managers=None):
- self.app_label = app_label
- self.name = name
- self.fields = fields
- self.options = options or {}
- self.options.setdefault('indexes', [])
- self.bases = bases or (models.Model, )
- self.managers = managers or []
- # Sanity-check that fields is NOT a dict. It must be ordered.
- if isinstance(self.fields, dict):
- raise ValueError("ModelState.fields cannot be a dict - it must be a list of 2-tuples.")
- for name, field in fields:
- # Sanity-check that fields are NOT already bound to a model.
- if hasattr(field, 'model'):
- raise ValueError(
- 'ModelState.fields cannot be bound to a model - "%s" is.' % name
- )
- # Sanity-check that relation fields are NOT referring to a model class.
- if field.is_relation and hasattr(field.related_model, '_meta'):
- raise ValueError(
- 'ModelState.fields cannot refer to a model class - "%s.to" does. '
- 'Use a string reference instead.' % name
- )
- if field.many_to_many and hasattr(field.remote_field.through, '_meta'):
- raise ValueError(
- 'ModelState.fields cannot refer to a model class - "%s.through" does. '
- 'Use a string reference instead.' % name
- )
- # Sanity-check that indexes have their name set.
- for index in self.options['indexes']:
- if not index.name:
- raise ValueError(
- "Indexes passed to ModelState require a name attribute. "
- "%r doesn't have one." % index
- )
-
- @cached_property
- def name_lower(self):
- return self.name.lower()
-
- @classmethod
- def from_model(cls, model, exclude_rels=False):
- """Given a model, return a ModelState representing it."""
- # Deconstruct the fields
- fields = []
- for field in model._meta.local_fields:
- if getattr(field, "remote_field", None) and exclude_rels:
- continue
- if isinstance(field, OrderWrt):
- continue
- name = field.name
- try:
- fields.append((name, field.clone()))
- except TypeError as e:
- raise TypeError("Couldn't reconstruct field %s on %s: %s" % (
- name,
- model._meta.label,
- e,
- ))
- if not exclude_rels:
- for field in model._meta.local_many_to_many:
- name = field.name
- try:
- fields.append((name, field.clone()))
- except TypeError as e:
- raise TypeError("Couldn't reconstruct m2m field %s on %s: %s" % (
- name,
- model._meta.object_name,
- e,
- ))
- # Extract the options
- options = {}
- for name in DEFAULT_NAMES:
- # Ignore some special options
- if name in ["apps", "app_label"]:
- continue
- elif name in model._meta.original_attrs:
- if name == "unique_together":
- ut = model._meta.original_attrs["unique_together"]
- options[name] = set(normalize_together(ut))
- elif name == "index_together":
- it = model._meta.original_attrs["index_together"]
- options[name] = set(normalize_together(it))
- elif name == "indexes":
- indexes = [idx.clone() for idx in model._meta.indexes]
- for index in indexes:
- if not index.name:
- index.set_name_with_model(model)
- options['indexes'] = indexes
- else:
- options[name] = model._meta.original_attrs[name]
- # If we're ignoring relationships, remove all field-listing model
- # options (that option basically just means "make a stub model")
- if exclude_rels:
- for key in ["unique_together", "index_together", "order_with_respect_to"]:
- if key in options:
- del options[key]
- # Private fields are ignored, so remove options that refer to them.
- elif options.get('order_with_respect_to') in {field.name for field in model._meta.private_fields}:
- del options['order_with_respect_to']
-
- def flatten_bases(model):
- bases = []
- for base in model.__bases__:
- if hasattr(base, "_meta") and base._meta.abstract:
- bases.extend(flatten_bases(base))
- else:
- bases.append(base)
- return bases
-
- # We can't rely on __mro__ directly because we only want to flatten
- # abstract models and not the whole tree. However by recursing on
- # __bases__ we may end up with duplicates and ordering issues, we
- # therefore discard any duplicates and reorder the bases according
- # to their index in the MRO.
- flattened_bases = sorted(set(flatten_bases(model)), key=lambda x: model.__mro__.index(x))
-
- # Make our record
- bases = tuple(
- (
- base._meta.label_lower
- if hasattr(base, "_meta") else
- base
- )
- for base in flattened_bases
- )
- # Ensure at least one base inherits from models.Model
- if not any((isinstance(base, str) or issubclass(base, models.Model)) for base in bases):
- bases = (models.Model,)
-
- managers = []
- manager_names = set()
- default_manager_shim = None
- for manager in model._meta.managers:
- if manager.name in manager_names:
- # Skip overridden managers.
- continue
- elif manager.use_in_migrations:
- # Copy managers usable in migrations.
- new_manager = copy.copy(manager)
- new_manager._set_creation_counter()
- elif manager is model._base_manager or manager is model._default_manager:
- # Shim custom managers used as default and base managers.
- new_manager = models.Manager()
- new_manager.model = manager.model
- new_manager.name = manager.name
- if manager is model._default_manager:
- default_manager_shim = new_manager
- else:
- continue
- manager_names.add(manager.name)
- managers.append((manager.name, new_manager))
-
- # Ignore a shimmed default manager called objects if it's the only one.
- if managers == [('objects', default_manager_shim)]:
- managers = []
-
- # Construct the new ModelState
- return cls(
- model._meta.app_label,
- model._meta.object_name,
- fields,
- options,
- bases,
- managers,
- )
-
- def construct_managers(self):
- """Deep-clone the managers using deconstruction."""
- # Sort all managers by their creation counter
- sorted_managers = sorted(self.managers, key=lambda v: v[1].creation_counter)
- for mgr_name, manager in sorted_managers:
- as_manager, manager_path, qs_path, args, kwargs = manager.deconstruct()
- if as_manager:
- qs_class = import_string(qs_path)
- yield mgr_name, qs_class.as_manager()
- else:
- manager_class = import_string(manager_path)
- yield mgr_name, manager_class(*args, **kwargs)
-
- def clone(self):
- """Return an exact copy of this ModelState."""
- return self.__class__(
- app_label=self.app_label,
- name=self.name,
- fields=list(self.fields),
- # Since options are shallow-copied here, operations such as
- # AddIndex must replace their option (e.g 'indexes') rather
- # than mutating it.
- options=dict(self.options),
- bases=self.bases,
- managers=list(self.managers),
- )
-
- def render(self, apps):
- """Create a Model object from our current state into the given apps."""
- # First, make a Meta object
- meta_contents = {'app_label': self.app_label, "apps": apps}
- meta_contents.update(self.options)
- meta = type("Meta", (), meta_contents)
- # Then, work out our bases
- try:
- bases = tuple(
- (apps.get_model(base) if isinstance(base, str) else base)
- for base in self.bases
- )
- except LookupError:
- raise InvalidBasesError("Cannot resolve one or more bases from %r" % (self.bases,))
- # Turn fields into a dict for the body, add other bits
- body = {name: field.clone() for name, field in self.fields}
- body['Meta'] = meta
- body['__module__'] = "__fake__"
-
- # Restore managers
- body.update(self.construct_managers())
- # Then, make a Model object (apps.register_model is called in __new__)
- return type(self.name, bases, body)
-
- def get_field_by_name(self, name):
- for fname, field in self.fields:
- if fname == name:
- return field
- raise ValueError("No field called %s on model %s" % (name, self.name))
-
- def get_index_by_name(self, name):
- for index in self.options['indexes']:
- if index.name == name:
- return index
- raise ValueError("No index named %s on model %s" % (name, self.name))
-
- def __repr__(self):
- return "<%s: '%s.%s'>" % (self.__class__.__name__, self.app_label, self.name)
-
- def __eq__(self, other):
- return (
- (self.app_label == other.app_label) and
- (self.name == other.name) and
- (len(self.fields) == len(other.fields)) and
- all((k1 == k2 and (f1.deconstruct()[1:] == f2.deconstruct()[1:]))
- for (k1, f1), (k2, f2) in zip(self.fields, other.fields)) and
- (self.options == other.options) and
- (self.bases == other.bases) and
- (self.managers == other.managers)
- )
diff --git a/thesisenv/lib/python3.6/site-packages/django/db/migrations/topological_sort.py b/thesisenv/lib/python3.6/site-packages/django/db/migrations/topological_sort.py
deleted file mode 100644
index 7b1ec7c..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/db/migrations/topological_sort.py
+++ /dev/null
@@ -1,32 +0,0 @@
-def topological_sort_as_sets(dependency_graph):
- """
- Variation of Kahn's algorithm (1962) that returns sets.
-
- Take a dependency graph as a dictionary of node => dependencies.
-
- Yield sets of items in topological order, where the first set contains
- all nodes without dependencies, and each following set contains all
- nodes that may depend on the nodes only in the previously yielded sets.
- """
- todo = dependency_graph.copy()
- while todo:
- current = {node for node, deps in todo.items() if len(deps) == 0}
-
- if not current:
- raise ValueError('Cyclic dependency in graph: {}'.format(
- ', '.join(repr(x) for x in todo.items())))
-
- yield current
-
- # remove current from todo's nodes & dependencies
- todo = {node: (dependencies - current) for node, dependencies in
- todo.items() if node not in current}
-
-
-def stable_topological_sort(l, dependency_graph):
- result = []
- for layer in topological_sort_as_sets(dependency_graph):
- for node in l:
- if node in layer:
- result.append(node)
- return result
diff --git a/thesisenv/lib/python3.6/site-packages/django/db/migrations/utils.py b/thesisenv/lib/python3.6/site-packages/django/db/migrations/utils.py
deleted file mode 100644
index 8939794..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/db/migrations/utils.py
+++ /dev/null
@@ -1,17 +0,0 @@
-import datetime
-import re
-
-COMPILED_REGEX_TYPE = type(re.compile(''))
-
-
-class RegexObject:
- def __init__(self, obj):
- self.pattern = obj.pattern
- self.flags = obj.flags
-
- def __eq__(self, other):
- return self.pattern == other.pattern and self.flags == other.flags
-
-
-def get_migration_name_timestamp():
- return datetime.datetime.now().strftime("%Y%m%d_%H%M")
diff --git a/thesisenv/lib/python3.6/site-packages/django/db/migrations/writer.py b/thesisenv/lib/python3.6/site-packages/django/db/migrations/writer.py
deleted file mode 100644
index aa296db..0000000
--- a/thesisenv/lib/python3.6/site-packages/django/db/migrations/writer.py
+++ /dev/null
@@ -1,296 +0,0 @@
-import os
-import re
-from importlib import import_module
-
-from django import get_version
-from django.apps import apps
-from django.db import migrations
-from django.db.migrations.loader import MigrationLoader
-from django.db.migrations.serializer import serializer_factory
-from django.utils.inspect import get_func_args
-from django.utils.module_loading import module_dir
-from django.utils.timezone import now
-
-
-class SettingsReference(str):
- """
- Special subclass of string which actually references a current settings
- value. It's treated as the value in memory, but serializes out to a
- settings.NAME attribute reference.
- """
-
- def __new__(self, value, setting_name):
- return str.__new__(self, value)
-
- def __init__(self, value, setting_name):
- self.setting_name = setting_name
-
-
-class OperationWriter:
- def __init__(self, operation, indentation=2):
- self.operation = operation
- self.buff = []
- self.indentation = indentation
-
- def serialize(self):
-
- def _write(_arg_name, _arg_value):
- if (_arg_name in self.operation.serialization_expand_args and
- isinstance(_arg_value, (list, tuple, dict))):
- if isinstance(_arg_value, dict):
- self.feed('%s={' % _arg_name)
- self.indent()
- for key, value in _arg_value.items():
- key_string, key_imports = MigrationWriter.serialize(key)
- arg_string, arg_imports = MigrationWriter.serialize(value)
- args = arg_string.splitlines()
- if len(args) > 1:
- self.feed('%s: %s' % (key_string, args[0]))
- for arg in args[1:-1]:
- self.feed(arg)
- self.feed('%s,' % args[-1])
- else:
- self.feed('%s: %s,' % (key_string, arg_string))
- imports.update(key_imports)
- imports.update(arg_imports)
- self.unindent()
- self.feed('},')
- else:
- self.feed('%s=[' % _arg_name)
- self.indent()
- for item in _arg_value:
- arg_string, arg_imports = MigrationWriter.serialize(item)
- args = arg_string.splitlines()
- if len(args) > 1:
- for arg in args[:-1]:
- self.feed(arg)
- self.feed('%s,' % args[-1])
- else:
- self.feed('%s,' % arg_string)
- imports.update(arg_imports)
- self.unindent()
- self.feed('],')
- else:
- arg_string, arg_imports = MigrationWriter.serialize(_arg_value)
- args = arg_string.splitlines()
- if len(args) > 1:
- self.feed('%s=%s' % (_arg_name, args[0]))
- for arg in args[1:-1]:
- self.feed(arg)
- self.feed('%s,' % args[-1])
- else:
- self.feed('%s=%s,' % (_arg_name, arg_string))
- imports.update(arg_imports)
-
- imports = set()
- name, args, kwargs = self.operation.deconstruct()
- operation_args = get_func_args(self.operation.__init__)
-
- # See if this operation is in django.db.migrations. If it is,
- # We can just use the fact we already have that imported,
- # otherwise, we need to add an import for the operation class.
- if getattr(migrations, name, None) == self.operation.__class__:
- self.feed('migrations.%s(' % name)
- else:
- imports.add('import %s' % (self.operation.__class__.__module__))
- self.feed('%s.%s(' % (self.operation.__class__.__module__, name))
-
- self.indent()
-
- for i, arg in enumerate(args):
- arg_value = arg
- arg_name = operation_args[i]
- _write(arg_name, arg_value)
-
- i = len(args)
- # Only iterate over remaining arguments
- for arg_name in operation_args[i:]:
- if arg_name in kwargs: # Don't sort to maintain signature order
- arg_value = kwargs[arg_name]
- _write(arg_name, arg_value)
-
- self.unindent()
- self.feed('),')
- return self.render(), imports
-
- def indent(self):
- self.indentation += 1
-
- def unindent(self):
- self.indentation -= 1
-
- def feed(self, line):
- self.buff.append(' ' * (self.indentation * 4) + line)
-
- def render(self):
- return '\n'.join(self.buff)
-
-
-class MigrationWriter:
- """
- Take a Migration instance and is able to produce the contents
- of the migration file from it.
- """
-
- def __init__(self, migration):
- self.migration = migration
- self.needs_manual_porting = False
-
- def as_string(self):
- """Return a string of the file contents."""
- items = {
- "replaces_str": "",
- "initial_str": "",
- }
-
- imports = set()
-
- # Deconstruct operations
- operations = []
- for operation in self.migration.operations:
- operation_string, operation_imports = OperationWriter(operation).serialize()
- imports.update(operation_imports)
- operations.append(operation_string)
- items["operations"] = "\n".join(operations) + "\n" if operations else ""
-
- # Format dependencies and write out swappable dependencies right
- dependencies = []
- for dependency in self.migration.dependencies:
- if dependency[0] == "__setting__":
- dependencies.append(" migrations.swappable_dependency(settings.%s)," % dependency[1])
- imports.add("from django.conf import settings")
- else:
- dependencies.append(" %s," % self.serialize(dependency)[0])
- items["dependencies"] = "\n".join(dependencies) + "\n" if dependencies else ""
-
- # Format imports nicely, swapping imports of functions from migration files
- # for comments
- migration_imports = set()
- for line in list(imports):
- if re.match(r"^import (.*)\.\d+[^\s]*$", line):
- migration_imports.add(line.split("import")[1].strip())
- imports.remove(line)
- self.needs_manual_porting = True
-
- # django.db.migrations is always used, but models import may not be.
- # If models import exists, merge it with migrations import.
- if "from django.db import models" in imports:
- imports.discard("from django.db import models")
- imports.add("from django.db import migrations, models")
- else:
- imports.add("from django.db import migrations")
-
- # Sort imports by the package / module to be imported (the part after
- # "from" in "from ... import ..." or after "import" in "import ...").
- sorted_imports = sorted(imports, key=lambda i: i.split()[1])
- items["imports"] = "\n".join(sorted_imports) + "\n" if imports else ""
- if migration_imports:
- items["imports"] += (
- "\n\n# Functions from the following migrations need manual "
- "copying.\n# Move them and any dependencies into this file, "
- "then update the\n# RunPython operations to refer to the local "
- "versions:\n# %s"
- ) % "\n# ".join(sorted(migration_imports))
- # If there's a replaces, make a string for it
- if self.migration.replaces:
- items['replaces_str'] = "\n replaces = %s\n" % self.serialize(self.migration.replaces)[0]
- # Hinting that goes into comment
- items.update(
- version=get_version(),
- timestamp=now().strftime("%Y-%m-%d %H:%M"),
- )
-
- if self.migration.initial:
- items['initial_str'] = "\n initial = True\n"
-
- return MIGRATION_TEMPLATE % items
-
- @property
- def basedir(self):
- migrations_package_name, _ = MigrationLoader.migrations_module(self.migration.app_label)
-
- if migrations_package_name is None:
- raise ValueError(
- "Django can't create migrations for app '%s' because "
- "migrations have been disabled via the MIGRATION_MODULES "
- "setting." % self.migration.app_label
- )
-
- # See if we can import the migrations module directly
- try:
- migrations_module = import_module(migrations_package_name)
- except ImportError:
- pass
- else:
- try:
- return module_dir(migrations_module)
- except ValueError:
- pass
-
- # Alright, see if it's a direct submodule of the app
- app_config = apps.get_app_config(self.migration.app_label)
- maybe_app_name, _, migrations_package_basename = migrations_package_name.rpartition(".")
- if app_config.name == maybe_app_name:
- return os.path.join(app_config.path, migrations_package_basename)
-
- # In case of using MIGRATION_MODULES setting and the custom package
- # doesn't exist, create one, starting from an existing package
- existing_dirs, missing_dirs = migrations_package_name.split("."), []
- while existing_dirs:
- missing_dirs.insert(0, existing_dirs.pop(-1))
- try:
- base_module = import_module(".".join(existing_dirs))
- except ImportError:
- continue
- else:
- try:
- base_dir = module_dir(base_module)
- except ValueError:
- continue
- else:
- break
- else:
- raise ValueError(
- "Could not locate an appropriate location to create "
- "migrations package %s. Make sure the toplevel "
- "package exists and can be imported." %
- migrations_package_name)
-
- final_dir = os.path.join(base_dir, *missing_dirs)
- if not os.path.isdir(final_dir):
- os.makedirs(final_dir)
- for missing_dir in missing_dirs:
- base_dir = os.path.join(base_dir, missing_dir)
- with open(os.path.join(base_dir, "__init__.py"), "w"):
- pass
-
- return final_dir
-
- @property
- def filename(self):
- return "%s.py" % self.migration.name
-
- @property
- def path(self):
- return os.path.join(self.basedir, self.filename)
-
- @classmethod
- def serialize(cls, value):
- return serializer_factory(value).serialize()
-
-
-MIGRATION_TEMPLATE = """\
-# Generated by Django %(version)s on %(timestamp)s
-
-%(imports)s
-
-class Migration(migrations.Migration):
-%(replaces_str)s%(initial_str)s
- dependencies = [
-%(dependencies)s\
- ]
-
- operations = [
-%(operations)s\
- ]
-"""