Object has no attribute error even though it does

In my djangoapp, the initial migration creates a model called UserProfile with a field called lowest_priority_to_alert. There is then later a migration that loads in some fixture data using a custom RunPython migration so that it can be used in our e2e tests. Finally there is a migration that removes the lowest_priority_to_alert field.

When I run these migrations, I can see that the initial migration is run, then the migration to load in the fixture data. However, I get the following error: AttributeError: 'UserProfile' object has no attribute 'lowest_priority_to_alert'

I will show my migration below, but I have added some logging: the first print shows which fields the UserProfile model currently has, and the second shows what the fixture data is. All of this looks normal:

['id', 'deleted_at', 'account_type', 'job_title', 'timezone', 'locale', 'api_only', 'api_burst_rate', 'api_sustained_rate', 'saved_frontend_settings_v2', 'lowest_priority_to_alert', 'mobile_push_notifications', 'dark_hour_start', 'dark_hour_end', 'is_fake', 'is_hidden', 'mobile_tabbar_options', 'menu_options', 'fixed_frontend_settings', 'disable_screen_recording', 'business', 'group', 'user', 'clock_format', 'csv_delimiter', 'temperature_unit', 'is_superstaff_user', 'set_of_cards', 'webapp_cards_v3', 'feature_flags']
[
{
  "model": "collector.userprofile",
  "pk": 1,
  "fields": {
    "deleted_at": null,
    "user": 1,
    "business": 795,
    "group": null,
    "account_type": 1,
    "job_title": null,
    "timezone": "Europe/London",
    "locale": "en-GB",
    "api_only": false,
    "api_burst_rate": 200,
    "api_sustained_rate": 10000,
    "saved_frontend_settings_v2": {
      "sales": {
        "tax": 0,
        "comps": 0,
        "promos": 0,
        "service": 0
      },
      "labour": {
        "showSalaried": 1,
        "labourOverhead": 0,
        "spreadSalaried": 0
      }
    },
    "lowest_priority_to_alert": 3000,
    "mobile_push_notifications": true,
    "dark_hour_start": null,
    "dark_hour_end": null,
    "is_superstaff_user": false,
    "is_fake": false,
    "is_hidden": false,
    "mobile_tabbar_options": [
      "home",
      "sales",
      "social",
      "employee",
      "more"
    ],
    "menu_options": [],
    "fixed_frontend_settings": null,
    "disable_screen_recording": false,
    "temperature_unit": null,
    "clock_format": null,
    "csv_delimiter": null,
    "webapp_cards_v3": [],
    "set_of_cards": [],
    "feature_flags": []
  }
}
]

I am very confused as to what is happening. I have triple checked that my migrations are running in the correct order. Here is my migration:

from pathlib import Path

from django.conf import settings
from django.core import serializers
from django.db import connections, migrations

from common.sharding.resharding.table_relationships import get_fk_models


def create_central_test_data(folder_path: Path):
    """Build the kwargs for a ``migrations.RunPython`` operation that loads
    central-database fixture data for e2e tests.

    Args:
        folder_path: Directory containing one ``<db_table>.json`` fixture
            file per model to load.

    Returns:
        A dict with ``code`` (the forwards function) and ``reverse_code``
        (a no-op), suitable for ``migrations.RunPython(**returned)``.
    """

    def forwards_code(apps, schema_editor):
        # Only load fixtures when running the test suite, and only on the
        # central ("default") database.
        if not settings.TESTING:
            return
        db_alias = schema_editor.connection.alias
        if db_alias != "default":
            return

        models = get_fk_models(get_central_models=True, get_sharded_models=False)

        # Delete in reverse FK order so dependents are cleared before the
        # models they point at.
        for model in reversed(models):
            if not folder_path.joinpath(f"{model._meta.db_table}.json").is_file():
                continue
            # Swap in the historical model so field definitions match this
            # migration's point in time, not the current code.
            model = apps.get_model(model._meta.label)
            print("deleting from", model)
            if hasattr(model, "all_objects"):
                model.all_objects.using(db_alias).hard_delete()
            elif hasattr(model, "objects"):
                model.objects.using(db_alias).delete()
            else:
                model.protected_objects.using(db_alias).delete()

        # Insert in forward FK order so targets exist before dependents.
        for model in models:
            fixture_path = folder_path.joinpath(f"{model._meta.db_table}.json")
            if not fixture_path.is_file():
                continue
            model = apps.get_model(model._meta.label)
            print("writing to", model)

            # Drop the insert trigger so we can insert rows with the same
            # explicit IDs that are stored in the fixture.
            drop_trigger_sql = (
                f"DROP TRIGGER IF EXISTS public_{model._meta.db_table}_insert "
                f"ON {model._meta.db_table};"
            )

            with connections[db_alias].cursor() as cursor:
                cursor.execute(drop_trigger_sql)

            with open(fixture_path, "r") as f:
                serialized = f.read()

            # Materialise immediately: deserialize() returns a one-shot
            # generator, so any debug iteration over it (e.g. printing the
            # objects) would otherwise leave bulk_create with nothing to
            # insert.
            deserialized = list(
                serializers.deserialize("json", serialized, ignorenonexistent=True)
            )

            if model._meta.db_table == "collector_userprofile":
                print(
                    f"The current {model._meta.db_table} count is {model.objects.count()}"
                )
                print([f.name for f in model._meta.get_fields()])
                print(serialized)

            objs = [d.object for d in deserialized]
            if hasattr(model, "objects"):
                model.objects.using(db_alias).bulk_create(objs)
            else:
                model.protected_objects.using(db_alias).bulk_create(objs)

            # Resync the ID sequence past the explicit PKs just inserted so
            # subsequent normal inserts don't collide.
            set_id_sequence_sql = f"""
            SELECT setval(
                'public.{model._meta.db_table}_id_seq',
                COALESCE((SELECT MAX(id)+1 FROM public.{model._meta.db_table}), 1),
                false
            );
            """

            with connections[db_alias].cursor() as cursor:
                cursor.execute(set_id_sequence_sql)

    return {"code": forwards_code, "reverse_code": migrations.RunPython.noop}

Does it work if you run migrate up to (and including) your data migration, but not including the subsequent migration?

<conjecture>
I’m guessing that the way you’re accessing the models isn’t getting the historical models, which means the model definitions you are retrieving are the “current” version of the models and not the versions that exist as of the time this migration is being executed.
</conjecture>

Thanks for the reply.

I could try running up to and including the data migration, but as that’s the one that’s failing, I wonder what that will actually do.

This is my get_fk_models code, which I think I’m right in saying isn’t getting the historical models? Although I’m not sure how I would get those.

def get_fk_models(
    models: list = None,
    ignore_apps: list = None,
    get_central_models: bool = False,
    get_sharded_models: bool = True,
    ignore_proxy_models: bool = True,
):
    """Collect models (and the models in their FK chains) filtered by
    central/sharded placement.

    Args:
        models: Models to scan; defaults to every installed model.
        ignore_apps: App labels to skip; defaults to a built-in list of
            third-party apps.
        get_central_models: Include models that read from the central DB.
        get_sharded_models: Include models that read from shard DBs.
        ignore_proxy_models: Skip proxy models when scanning.

    Returns:
        A de-duplicated list of models, in FK-chain order.
    """
    get_all_models = get_central_models and get_sharded_models

    if models is None:
        models = apps.get_models(include_auto_created=True, include_swapped=True)

    if ignore_apps is None:
        ignore_apps = [
            "oauth2_provider",
            "django_otp",
            "two_factor",
            "otp_static",
            "otp_totp",
            "socialaccount",
            "authtoken",
            "django_celery_results",
            "django_celery_beat",
            "sessions",
        ]

    # BUG FIX: this body was previously indented inside the
    # `if ignore_apps is None:` block, so any caller passing an explicit
    # ignore_apps list got None back instead of the model list.
    fk_models = []

    for model in models:
        model_is_central = ShardRouter._model_should_read_from_central(model)
        get_model = (
            get_all_models
            or (get_central_models and model_is_central)
            or (get_sharded_models and not model_is_central)
        )
        if not get_model:
            continue

        if model._meta.app_label in ignore_apps:
            continue
        if ignore_proxy_models and model._meta.proxy:
            continue

        for fk_model in get_fk_chain(model):
            fk_model_is_central = ShardRouter._model_should_read_from_central(
                fk_model
            )
            get_fk_model = (
                get_all_models
                or (get_central_models and fk_model_is_central)
                or (get_sharded_models and not fk_model_is_central)
            )

            if get_fk_model and fk_model not in fk_models:
                fk_models.append(fk_model)

    logger.debug(f"Got {len(fk_models)} models")
    return fk_models

I’m expecting (based on my current understanding) to create the exact same error as it currently does. (In other words, I’m not expecting any different behavior - it’s more of a confirmation that my conjecture is likely correct.)

From what you’re showing here, neither am I. I have to hope that someone more knowledgeable than I jumps in on this thread to help.

From what I can see, your forwards_code function should be getting the instance of Apps containing the historical models as its first parameter. You would need to pass this to your get_fk_models function.

I don’t see where you define apps for the following statement in get_fk_models:

But it’s possible that just using the apps being passed in as a parameter might work.

(See the docs and example for RunPython)

It turns out apps.get_model(model._meta.label) does actually get the historical version of the model. When I do print("writing to", model) I get writing to <class '__fake__.UserProfile'>, which indicates it’s a historical model?

Where is the error being thrown? (What line / statement?)

In the bulk create:

Traceback (most recent call last):
  File "manage.py", line 10, in <module>
    execute_from_command_line(sys.argv)
  File "/usr/local/lib/python3.8/site-packages/django/core/management/__init__.py", line 381, in execute_from_command_line
    utility.execute()
  File "/usr/local/lib/python3.8/site-packages/django/core/management/__init__.py", line 375, in execute
    self.fetch_command(subcommand).run_from_argv(self.argv)
  File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 323, in run_from_argv
    self.execute(*args, **cmd_options)
  File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 364, in execute
    output = self.handle(*args, **options)
  File "/code/common/management/commands/migrate_schemas.py", line 41, in handle
    executor.run_migrations(
  File "/code/common/migration_executors/base.py", line 104, in run_migrations
    run_migrations(
  File "/code/common/migration_executors/base.py", line 68, in run_migrations
    MigrateCommand(stdout=stdout, stderr=stderr).execute(*args, **options)
  File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 364, in execute
    output = self.handle(*args, **options)
  File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 83, in wrapped
    res = handle_func(*args, **kwargs)
  File "/usr/local/lib/python3.8/site-packages/django/core/management/commands/migrate.py", line 232, in handle
    post_migrate_state = executor.migrate(
  File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 117, in migrate
    state = self._migrate_all_forwards(state, plan, full_plan, fake=fake, fake_initial=fake_initial)
  File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 147, in _migrate_all_forwards
    state = self.apply_migration(state, migration, fake=fake, fake_initial=fake_initial)
  File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 245, in apply_migration
    state = migration.apply(state, schema_editor)
  File "/usr/local/lib/python3.8/site-packages/django/db/migrations/migration.py", line 124, in apply
    operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
  File "/usr/local/lib/python3.8/site-packages/django/db/migrations/operations/special.py", line 190, in database_forwards
    self.code(from_state.apps, schema_editor)
  File "/code/testing/restore_central.py", line 59, in forwards_code
    model.objects.using(db_alias).bulk_create(
  File "/usr/local/lib/python3.8/site-packages/django/db/models/query.py", line 468, in bulk_create
    self._batched_insert(objs_with_pk, fields, batch_size, ignore_conflicts=ignore_conflicts)
  File "/usr/local/lib/python3.8/site-packages/django/db/models/query.py", line 1202, in _batched_insert
    inserted_id = self._insert(
  File "/usr/local/lib/python3.8/site-packages/django/db/models/query.py", line 1186, in _insert
    return query.get_compiler(using=using).execute_sql(return_id)
  File "/usr/local/lib/python3.8/site-packages/django/db/models/sql/compiler.py", line 1376, in execute_sql
    for sql, params in self.as_sql():
  File "/usr/local/lib/python3.8/site-packages/django/db/models/sql/compiler.py", line 1318, in as_sql
    value_rows = [
  File "/usr/local/lib/python3.8/site-packages/django/db/models/sql/compiler.py", line 1319, in <listcomp>
    [self.prepare_value(field, self.pre_save_val(field, obj)) for field in fields]
  File "/usr/local/lib/python3.8/site-packages/django/db/models/sql/compiler.py", line 1319, in <listcomp>
    [self.prepare_value(field, self.pre_save_val(field, obj)) for field in fields]
  File "/usr/local/lib/python3.8/site-packages/django/db/models/sql/compiler.py", line 1270, in pre_save_val
    return field.pre_save(obj, add=True)
  File "/usr/local/lib/python3.8/site-packages/django/db/models/fields/__init__.py", line 773, in pre_save
    return getattr(model_instance, self.attname)
AttributeError: 'UserProfile' object has no attribute 'lowest_priority_to_alert'

The plot thickens though, if I add

            if model._meta.db_table == "collector_userprofile":
                print(model)
                print(
                    f"Fields of __fake__ UserProfile: {[f.name for f in model._meta.get_fields()]}"
                )

            if hasattr(model, "objects"):
                if model._meta.db_table == "collector_userprofile":
                    print(
                        f"Deserialized object attrs: [{[d.object.__dict__ for d in deserialized]}]"
                    )

                model.objects.using(db_alias).bulk_create(
                    d.object for d in deserialized
                )
            else:

to do some debugging, the migrations run through fine:

Fields of __fake__ UserProfile: ['id', 'deleted_at', 'account_type', 'job_title', 'timezone', 'locale', 'api_only', 'api_burst_rate', 'api_sustained_rate', 'saved_frontend_settings_v2', 'lowest_priority_to_alert', 'mobile_push_notifications', 'dark_hour_start', 'dark_hour_end', 'is_fake', 'is_hidden', 'mobile_tabbar_options', 'menu_options', 'fixed_frontend_settings', 'disable_screen_recording', 'business', 'group', 'user', 'clock_format', 'csv_delimiter', 'temperature_unit', 'is_superstaff_user', 'set_of_cards', 'webapp_cards_v3', 'feature_flags']

Deserialized object attrs: [[{'_state': <django.db.models.base.ModelState object at 0x7fff9f380c70>, 'id': 1, 'deleted_at': None, 'user_id': 1, 'business_id': 795, 'group_id': None, 'account_type': 1, 'job_title': None, 'timezone': 'Europe/London', 'locale': 'en-GB', 'api_only': False, 'api_burst_rate': 200, 'api_sustained_rate': 10000, 'saved_frontend_settings_v2': {'sales': {'tax': 0, 'comps': 0, 'promos': 0, 'service': 0}, 'labour': {'showSalaried': 1, 'labourOverhead': 0, 'spreadSalaried': 0}}, 'is_superstaff_user': False, 'is_fake': False, 'is_hidden': False, 'mobile_tabbar_options': ['home', 'sales', 'social', 'employee', 'more'], 'menu_options': [], 'fixed_frontend_settings': None, 'disable_screen_recording': False, 'temperature_unit': None, 'clock_format': None, 'csv_delimiter': None}]]

I think some weird stuff might be happening with Docker (beyond the scope of this forum, I know): when I inspect the container that is running these migrations, the fixture data DOES include lowest_priority_to_alert, whereas the copy on my local machine DOES NOT.