From f3df80deaa5d2fecc67b2c4d7cff51ab5bc560cd Mon Sep 17 00:00:00 2001 From: Patrick Yu Date: Mon, 1 Jul 2024 22:59:34 -0700 Subject: [PATCH 1/9] APIv2 Django Ninja device info added --- Dockerfile | 58 +- docker-compose.override.yml | 49 +- docker-compose.yml | 37 +- .../0008_alter_gender_race_fields_setfield.py | 25 - home/migrations/0010_auto_20220321_0407.py | 22 - ...vice_model_device_manufacturer_and_more.py | 127 ++ home/models/account.py | 316 ++-- home/models/device.py | 127 +- home/views/api/__init__.py | 2 +- home/views/api/admin.py | 1327 ++++++++--------- home/views/api/api.py | 0 home/views/api/appuser.py | 664 ++++----- home/views/api/appuserv2.py | 126 ++ home/views/api/contest.py | 74 +- home/views/api/dailywalk.py | 440 +++--- home/views/api/export.py | 558 +++---- home/views/api/histogram/serializers.py | 1018 ++++++------- home/views/api/intentionalwalk.py | 364 ++--- home/views/api/leaderboard.py | 227 ++- home/views/api/schemas/account.py | 145 ++ .../api/serializers/request_serializers.py | 281 ++-- .../api/serializers/response_serializers.py | 74 +- home/views/api/utils.py | 178 +-- home/views/api/weeklygoal.py | 348 ++--- home/views/apiv2/admin.py | 621 ++++++++ home/views/apiv2/appuser.py | 135 ++ home/views/apiv2/contest.py | 18 + home/views/apiv2/dailywalk.py | 135 ++ home/views/apiv2/device.py | 72 + home/views/apiv2/export.py | 273 ++++ home/views/apiv2/histogram/histogram.py | 422 ++++++ home/views/apiv2/intentionalwalk.py | 96 ++ home/views/apiv2/leaderboard.py | 79 + home/views/apiv2/schemas/account.py | 241 +++ home/views/apiv2/schemas/admin.py | 289 ++++ home/views/apiv2/schemas/contest.py | 27 + home/views/apiv2/schemas/dailywalk.py | 58 + home/views/apiv2/schemas/device.py | 117 ++ home/views/apiv2/schemas/intentionalwalk.py | 100 ++ home/views/apiv2/schemas/leaderboard.py | 31 + home/views/apiv2/schemas/weeklygoal.py | 83 ++ home/views/apiv2/weeklygoal.py | 105 ++ poetry.lock | 164 +- pyproject.toml | 76 +- requirements.txt | 7 +- scripts/dummydata.py | 9 +- server/urls.py | 71 +- 47 files changed, 6636 insertions(+), 3180 deletions(-) create mode 100644 home/migrations/0014_device_device_model_device_manufacturer_and_more.py create mode 100644 home/views/api/api.py create mode 100644 home/views/api/appuserv2.py create mode 100644 home/views/api/schemas/account.py create mode 100644 home/views/apiv2/admin.py create mode 100644 home/views/apiv2/appuser.py create mode 100644 home/views/apiv2/contest.py create mode 100644 home/views/apiv2/dailywalk.py create mode 100644 home/views/apiv2/device.py create mode 100644 home/views/apiv2/export.py create mode 100644 home/views/apiv2/histogram/histogram.py create mode 100644 home/views/apiv2/intentionalwalk.py create mode 100644 home/views/apiv2/leaderboard.py create mode 100644 home/views/apiv2/schemas/account.py create mode 100644 home/views/apiv2/schemas/admin.py create mode 100644 home/views/apiv2/schemas/contest.py create mode 100644 home/views/apiv2/schemas/dailywalk.py create mode 100644 home/views/apiv2/schemas/device.py create mode 100644 home/views/apiv2/schemas/intentionalwalk.py create mode 100644 home/views/apiv2/schemas/leaderboard.py create mode 100644 home/views/apiv2/schemas/weeklygoal.py create mode 100644 home/views/apiv2/weeklygoal.py diff --git a/Dockerfile b/Dockerfile index cca6fedb..752000ed 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,29 +1,29 @@ -FROM python:3.11.4 -ENV PATH="/root/.local/bin:${PATH}" - -# Install postgres client -RUN wget -q 
https://www.postgresql.org/media/keys/ACCC4CF8.asc -O - | apt-key add - && \ - echo "deb http://apt.postgresql.org/pub/repos/apt/ bullseye-pgdg main" >> /etc/apt/sources.list.d/pgdg.list && \ - apt-get update -y && \ - apt-get install -y postgresql-client-12 tzdata && \ - curl -fsSL https://deb.nodesource.com/setup_18.x | bash - && \ - apt-get install -y nodejs && \ - rm -rf /var/lib/apt/lists/* - -# Install Heroku client -RUN curl https://cli-assets.heroku.com/install.sh | sh - -# Install poetry -RUN curl -sSL https://install.python-poetry.org | python3 - - -# Add files -ADD . /app -WORKDIR /app - -# Install client dependencies and update path to include poetry and node module executables -RUN npm install && \ - echo "export PATH=~/.local/bin:/app/node_modules/.bin:/app/client/node_modules/.bin:\$PATH\n" >> /root/.bashrc - -# Run poetry to install dependencies -RUN poetry config virtualenvs.create false && \ - poetry install +FROM python:3.11.9-bookworm +ENV PATH="/root/.local/bin:${PATH}" + +# Install postgres client +RUN wget -q https://www.postgresql.org/media/keys/ACCC4CF8.asc -O - | apt-key add - && \ + echo "deb http://apt.postgresql.org/pub/repos/apt/ bookworm-pgdg main" >> /etc/apt/sources.list.d/pgdg.list && \ + apt-get update -y && \ + apt-get install -y postgresql-client-14 tzdata && \ + curl -fsSL https://deb.nodesource.com/setup_18.x | bash - && \ + apt-get install -y nodejs && \ + rm -rf /var/lib/apt/lists/* + +# Install Heroku client +RUN curl https://cli-assets.heroku.com/install.sh | sh + +# Install poetry +RUN curl -sSL https://install.python-poetry.org | python3 - + +# Add files +ADD . /app +WORKDIR /app + +# Install client dependencies and update path to include poetry and node module executables +RUN npm install && \ + echo "export PATH=~/.local/bin:/app/node_modules/.bin:/app/client/node_modules/.bin:\$PATH\n" >> /root/.bashrc + +# Run poetry to install dependencies +RUN poetry config virtualenvs.create false && \ + poetry install diff --git a/docker-compose.override.yml b/docker-compose.override.yml index d5777ade..2c0748e9 100644 --- a/docker-compose.override.yml +++ b/docker-compose.override.yml @@ -1,25 +1,24 @@ -version: '3' -services: - server: - container_name: server - command: bash -l -c "bin/init; nf start -j Procfile.dev" - environment: - - PORT=3000 - ports: - - 3000:3000 - volumes: - - .:/app - - /app/client/node_modules - - /app/node_modules - coverage: - image: python:3.8 - volumes: - - .:/app - working_dir: /app/htmlcov - command: python -m http.server 8001 - ports: - - "8001:8001" - db: - image: postgres:12 - ports: - - 5433:5432 +services: + server: + container_name: server + command: bash -l -c "bin/init; nf start -j Procfile.dev" + environment: + - PORT=3000 + ports: + - 3000:3000 + volumes: + - .:/app + - /app/client/node_modules + - /app/node_modules + coverage: + image: python:3.8 + volumes: + - .:/app + working_dir: /app/htmlcov + command: python -m http.server 8001 + ports: + - "8001:8001" + db: + image: postgres:14.12 + ports: + - 5433:5432 diff --git a/docker-compose.yml b/docker-compose.yml index 03a19f39..13f70bbe 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,19 +1,18 @@ -version: '3' -services: - db: - image: postgres:12 - environment: - - POSTGRES_HOST_AUTH_METHOD=trust - volumes: - - postgres-data:/var/lib/postgresql/data - server: - build: . 
- env_file: - - .env - ports: - - ${PORT:-8000}:8000 - depends_on: - - db - -volumes: - postgres-data: {} +services: + db: + image: postgres:14.12 + environment: + - POSTGRES_HOST_AUTH_METHOD=trust + volumes: + - postgres-data:/var/lib/postgresql/data + server: + build: . + env_file: + - .env + ports: + - ${PORT:-8000}:8000 + depends_on: + - db + +volumes: + postgres-data: {} diff --git a/home/migrations/0008_alter_gender_race_fields_setfield.py b/home/migrations/0008_alter_gender_race_fields_setfield.py index cceba01d..ba369119 100644 --- a/home/migrations/0008_alter_gender_race_fields_setfield.py +++ b/home/migrations/0008_alter_gender_race_fields_setfield.py @@ -1,6 +1,5 @@ # Generated by Django 3.2.9 on 2021-12-28 02:13 -import setfield from django.db import migrations, models import home.models.account @@ -43,30 +42,6 @@ class Migration(migrations.Migration): null=True, ), ), - migrations.RemoveField( - model_name="account", - name="race", - ), - migrations.AddField( - model_name="account", - name="race", - field=setfield.SetField( - base_field=models.CharField( - choices=[ - ("NA", home.models.account.RaceLabels["NA"]), - ("BL", home.models.account.RaceLabels["BL"]), - ("AS", home.models.account.RaceLabels["AS"]), - ("PI", home.models.account.RaceLabels["PI"]), - ("WH", home.models.account.RaceLabels["WH"]), - ("OT", home.models.account.RaceLabels["OT"]), - ], - max_length=2, - ), - blank=True, - default=list, - size=None, - ), - ), migrations.AlterField( model_name="dailywalk", name="date", diff --git a/home/migrations/0010_auto_20220321_0407.py b/home/migrations/0010_auto_20220321_0407.py index c4b14fe3..383c9a0d 100644 --- a/home/migrations/0010_auto_20220321_0407.py +++ b/home/migrations/0010_auto_20220321_0407.py @@ -1,6 +1,5 @@ # Generated by Django 3.2.12 on 2022-03-21 11:07 -import setfield from django.db import migrations, models import home.models.account @@ -43,25 +42,4 @@ class Migration(migrations.Migration): null=True, ), ), - migrations.AlterField( - model_name="account", - name="race", - field=setfield.SetField( - base_field=models.CharField( - choices=[ - ("NA", home.models.account.RaceLabels["NA"]), - ("BL", home.models.account.RaceLabels["BL"]), - ("AS", home.models.account.RaceLabels["AS"]), - ("PI", home.models.account.RaceLabels["PI"]), - ("WH", home.models.account.RaceLabels["WH"]), - ("OT", home.models.account.RaceLabels["OT"]), - ("DA", home.models.account.RaceLabels["DA"]), - ], - max_length=2, - ), - blank=True, - default=list, - size=None, - ), - ), ] diff --git a/home/migrations/0014_device_device_model_device_manufacturer_and_more.py b/home/migrations/0014_device_device_model_device_manufacturer_and_more.py new file mode 100644 index 00000000..208c4a64 --- /dev/null +++ b/home/migrations/0014_device_device_model_device_manufacturer_and_more.py @@ -0,0 +1,127 @@ +# Generated by Django 4.2.11 on 2024-07-01 03:35 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("home", "0013_weeklygoal"), + ] + + operations = [ + migrations.AddField( + model_name="device", + name="device_model", + field=models.CharField( + blank=True, + help_text='Unique identifier for the device\'s model.\n getDeviceid() - Gets the device ID.\n iOS: "iPhone7,2"\n Android: "goldfish"\n Windows: "Y3R94UC#AC4"\n ', + max_length=25, + null=True, + ), + ), + migrations.AddField( + model_name="device", + name="manufacturer", + field=models.CharField( + blank=True, + help_text='Manufacturer of the device.\n getManufacturer() - Gets 
the device manufacturer\n iOS: "Apple"\n Android: "Google"\n Windows: ?\n ', + max_length=25, + null=True, + ), + ), + migrations.AddField( + model_name="device", + name="os_name", + field=models.CharField( + blank=True, + help_text='Operating system of the device.\n getSystemName() - Gets the device OS name.\n iOS: "iOS" on newer iOS devices "iPhone OS" on older devices (including older iPad models), "iPadOS" for iPads using iPadOS 15.0 or higher.\n Android: "Android"\n Windows: ?\n ', + max_length=25, + null=True, + ), + ), + migrations.AddField( + model_name="device", + name="os_version", + field=models.CharField( + blank=True, + help_text='Device operating system version. \n getSystemVersion() - Gets the device OS version.\n iOS: "11.0"\n Android: "7.1.1"\n Windows: ?\n ', + max_length=25, + null=True, + ), + ), + migrations.AlterField( + model_name="account", + name="gender", + field=models.CharField( + blank=True, + choices=[ + ("CF", "CF"), + ("CM", "CM"), + ("TF", "TF"), + ("TM", "TM"), + ("NB", "NB"), + ("OT", "OT"), + ("DA", "DA"), + ], + help_text="Self-identified gender identity of user", + max_length=2, + null=True, + ), + ), + migrations.AlterField( + model_name="account", + name="is_latino", + field=models.CharField( + blank=True, + choices=[("YE", "YE"), ("NO", "NO"), ("DA", "DA")], + help_text="Latino or Hispanic origin", + max_length=2, + null=True, + ), + ), + migrations.RemoveField( + model_name="account", + name="race", + ), + migrations.AddField( + model_name="account", + name="race", + field=models.JSONField( + blank=True, + choices=[ + ("NA", "NA"), + ("BL", "BL"), + ("AS", "AS"), + ("PI", "PI"), + ("WH", "WH"), + ("OT", "OT"), + ("DA", "DA"), + ], + null=True, + ), + ), + migrations.AlterField( + model_name="account", + name="sexual_orien", + field=models.CharField( + blank=True, + choices=[ + ("BS", "BS"), + ("SG", "SG"), + ("US", "US"), + ("HS", "HS"), + ("OT", "OT"), + ("DA", "DA"), + ], + help_text="Self-identified sexual orientation of user", + max_length=2, + null=True, + ), + ), + migrations.AlterField( + model_name="account", + name="zip", + field=models.CharField(help_text="User's zip code", max_length=25), + ), + ] diff --git a/home/models/account.py b/home/models/account.py index c84e6046..1c0c2d4f 100644 --- a/home/models/account.py +++ b/home/models/account.py @@ -1,154 +1,162 @@ -from enum import Enum - -from django.db import models -from setfield import SetField - -SAN_FRANCISCO_ZIP_CODES = set( - [ - "94102", - "94103", - "94104", - "94105", - "94107", - "94108", - "94109", - "94110", - "94111", - "94112", - "94114", - "94115", - "94116", - "94117", - "94118", - "94121", - "94122", - "94123", - "94124", - "94127", - "94129", - "94130", - "94131", - "94132", - "94133", - "94134", - "94158", - ] -) - - -class GenderLabels(Enum): - CF = "Female" - CM = "Male" - TF = "Trans Female" - TM = "Trans Male" - NB = "Non-binary" - OT = "Other" - DA = "Decline to answer" - - -class RaceLabels(Enum): - NA = "American Indian or Alaska Native" - BL = "Black" - AS = "Asian" - PI = "Native Hawaiian or other Pacific Islander" - WH = "White" - OT = "Other" - DA = "Decline to answer" - - -class SexualOrientationLabels(Enum): - BS = "Bisexual" - SG = "SameGenderLoving" - US = "Unsure" - HS = "Heterosexual" - OT = "Other" - DA = "Decline to answer" - - -class IsLatinoLabels(Enum): - YE = "Yes" - NO = "No" - DA = "Decline to answer" - - -# Note: Maybe inherit from Django's User model? 
-class Account(models.Model): - """ - Stores a single user account as identified by email. This is created when - the app is installed and the user signs up for the first time and is has - multiple devices - :model: `home.Device` associated with it - """ - - email = models.EmailField( - unique=True, help_text="Email which uniquely identifies an account" - ) - name = models.CharField(max_length=250, help_text="User's name") - zip = models.CharField(max_length=25, help_text="User's zipcode") - age = models.IntegerField(help_text="User's age") - is_sf_resident = models.BooleanField( - null=True, - help_text="Whether the user is a SF resident or not, based on zip", - ) - is_latino = models.CharField( - max_length=2, - null=True, - blank=True, - help_text="Latino or Hispanic origin", - ) - race = SetField( - models.CharField( - max_length=2, choices=list(RaceLabels.__members__.items()) - ), - default=list, - blank=True, - ) - race_other = models.CharField( - max_length=75, - null=True, - blank=True, - help_text="Free-form text field for 'race' value 'OT'", - ) - gender = models.CharField( - max_length=2, - null=True, - blank=True, - help_text="Self-identified gender identity of user", - ) - gender_other = models.CharField( - max_length=75, - null=True, - blank=True, - help_text="Free-form text field for 'gender' value 'OT'", - ) - sexual_orien = models.CharField( - max_length=2, - null=True, - blank=True, - help_text="Self-identified sexual orientation of user", - ) - sexual_orien_other = models.CharField( - max_length=75, - null=True, - blank=True, - help_text="Free-form text field for 'sexual_orien' value 'OT'", - ) - is_tester = models.BooleanField( - default=False, help_text="User is an app tester" - ) - contests = models.ManyToManyField( - "Contest", - blank=True, - help_text="All the contests the account has enrolled in", - ) - created = models.DateTimeField( - auto_now_add=True, help_text="Accounts creation timestamp" - ) - updated = models.DateTimeField( - auto_now=True, help_text="Accounts updation timestamp" - ) - - def __str__(self): - return f"{self.name} | {self.email}" - - class Meta: - ordering = ("-created",) +import json +from enum import Enum + +from django.db import models +from django.utils.translation import gettext_lazy as _ + +SAN_FRANCISCO_ZIP_CODES = set( + [ + "94102", + "94103", + "94104", + "94105", + "94107", + "94108", + "94109", + "94110", + "94111", + "94112", + "94114", + "94115", + "94116", + "94117", + "94118", + "94121", + "94122", + "94123", + "94124", + "94127", + "94129", + "94130", + "94131", + "94132", + "94133", + "94134", + "94158", + ] +) + + +class GenderLabels(models.TextChoices): + CF = "CF", _("Female") + CM = "CM", _("Male") + TF = "TF", _("Trans Female") + TM = "TM", _("Trans Male") + NB = "NB", _("Non-binary") + OT = "OT", _("Other") + DA = "DA", _("Decline to answer") + + +class RaceLabels(models.TextChoices): + NA = "NA", _("American Indian or Alaska Native") + BL = "BL", _("Black") + AS = "AS", _("Asian") + PI = "PI", _("Native Hawaiian or other Pacific Islander") + WH = "WH", _("White") + OT = "OT", _("Other") + DA = "DA", _("Decline to answer") + + +class SexualOrientationLabels(models.TextChoices): + BS = "BS", _("Bisexual") + SG = "SG", _("SameGenderLoving") + US = "US", _("Unsure") + HS = "HS", _("Heterosexual") + OT = "OT", _("Other") + DA = "DA", _("Decline to answer") + + +class IsLatinoLabels(models.TextChoices): + YE = "YE", _("Yes") + NO = "NO", _("No") + DA = "DA", _("Decline to answer") + + +# Note: Maybe inherit from Django's 
User model?
+class Account(models.Model):
+    """
+    Stores a single user account as identified by email. This is created when
+    the app is installed and the user signs up for the first time and has
+    multiple devices - :model: `home.Device` associated with it
+    """
+
+    email = models.EmailField(
+        unique=True, help_text="Email which uniquely identifies an account"
+    )
+    name = models.CharField(max_length=250, help_text="User's name")
+    zip = models.CharField(max_length=25, help_text="User's zip code")
+    age = models.IntegerField(help_text="User's age")
+    is_sf_resident = models.BooleanField(
+        null=True,
+        help_text="Whether the user is a SF resident or not, based on zip",
+    )
+    is_latino = models.CharField(
+        max_length=2,
+        null=True,
+        blank=True,
+        choices=IsLatinoLabels.__members__.items(),
+        help_text="Latino or Hispanic origin",
+    )
+    race = models.JSONField(
+        null=True,
+        blank=True,
+        choices=list(RaceLabels.__members__.items()),
+    )
+    race_other = models.CharField(
+        max_length=75,
+        null=True,
+        blank=True,
+        help_text="Free-form text field for 'race' value 'OT'",
+    )
+    gender = models.CharField(
+        max_length=2,
+        null=True,
+        blank=True,
+        choices=GenderLabels.__members__.items(),
+        help_text="Self-identified gender identity of user",
+    )
+    gender_other = models.CharField(
+        max_length=75,
+        null=True,
+        blank=True,
+        help_text="Free-form text field for 'gender' value 'OT'",
+    )
+    sexual_orien = models.CharField(
+        max_length=2,
+        null=True,
+        blank=True,
+        choices=SexualOrientationLabels.__members__.items(),
+        help_text="Self-identified sexual orientation of user",
+    )
+    sexual_orien_other = models.CharField(
+        max_length=75,
+        null=True,
+        blank=True,
+        help_text="Free-form text field for 'sexual_orien' value 'OT'",
+    )
+    is_tester = models.BooleanField(
+        default=False, help_text="User is an app tester"
+    )
+    contests = models.ManyToManyField(
+        "Contest",
+        blank=True,
+        help_text="All the contests the account has enrolled in",
+    )
+    created = models.DateTimeField(
+        auto_now_add=True, help_text="Account creation timestamp"
+    )
+    updated = models.DateTimeField(
+        auto_now=True, help_text="Account last update timestamp"
+    )
+
+    def __str__(self):
+        return f"{self.name} | {self.email}"
+
+    def set_race(self, x):
+        # Store the list of race selections as a JSON-encoded string.
+        self.race = json.dumps(x)
+
+    def get_race(self):
+        return json.loads(self.race)
+
+    class Meta:
+        ordering = ("-created",)
diff --git a/home/models/device.py b/home/models/device.py
index c35c8156..eb331663 100644
--- a/home/models/device.py
+++ b/home/models/device.py
@@ -1,34 +1,93 @@
-from django.db import models
-
-
-class Device(models.Model):
-    """
-    Stores a single device registration. This entry is created when the app
-    is installed and a registration call is made from the device.
-    This is always linked to a user's account - :model: `home.Account`.
-    All the daily and intentional walks link back to this
-    """
-
-    device_id = models.CharField(
-        max_length=250,
-        primary_key=True,
-        help_text=(
-            "A unique id generated by the app when it is first installed"
-            " on a device. 
Used for authentication on subsequent calls" - ), - ) - account = models.ForeignKey( - "Account", - on_delete=models.CASCADE, - help_text="User account associated with this registered device", - ) - created = models.DateTimeField( - auto_now_add=True, - help_text="When the record was created/device was registered", - ) - - def __str__(self): - return f"{self.device_id} | {self.account} " - - class Meta: - ordering = ("-created",) +from django.db import models + + +class Device(models.Model): + """ + Stores a single device registration. This entry is created when the app + is installed and a registration call is made from the device. + This is always linked to a user's account - :model: `home.Account`. + All the daily and intentional walks link back to this + """ + + device_id = models.CharField( + max_length=250, + primary_key=True, + help_text=( + "A unique id generated by the app when it is first installed" + " on a device. Used for authentication on subsequent calls" + ), + ) + account = models.ForeignKey( + "Account", + on_delete=models.CASCADE, + help_text="User account associated with this registered device", + ) + created = models.DateTimeField( + auto_now_add=True, + help_text="When the record was created/device was registered", + ) + device_model = models.CharField( + max_length=25, + null=True, + blank=True, + help_text="""Unique identifier for the device's model. + getDeviceid() - Gets the device ID. + iOS: "iPhone7,2" + Android: "goldfish" + Windows: "Y3R94UC#AC4" + """, + ) + # device_type = models.CharField( + # max_length=25, + # null=True, + # blank=True, + # help_text="""" + # getDeviceType() - Returns the device's type as a string, which will be one of: + # Handset + # Tablet + # Tv + # Desktop + # GamingConsole + # Headset + # unknown + # """, + # ) + manufacturer = models.CharField( + max_length=25, + null=True, + blank=True, + help_text="""Manufacturer of the device. + getManufacturer() - Gets the device manufacturer + iOS: "Apple" + Android: "Google" + Windows: ? + """, + ) + os_name = models.CharField( + max_length=25, + null=True, + blank=True, + help_text="""Operating system of the device. + getSystemName() - Gets the device OS name. + iOS: "iOS" on newer iOS devices "iPhone OS" on older devices (including older iPad models), "iPadOS" for iPads using iPadOS 15.0 or higher. + Android: "Android" + Windows: ? + """, + ) + os_version = models.CharField( + max_length=25, + null=True, + blank=True, + help_text="""Device operating system version. + getSystemVersion() - Gets the device OS version. + iOS: "11.0" + Android: "7.1.1" + Windows: ? 
+ """, + ) + + def __str__(self): + return f"{self.device_id} | {self.account} " + + class Meta: + ordering = ("-created",) diff --git a/home/views/api/__init__.py b/home/views/api/__init__.py index e427f172..f07271ac 100644 --- a/home/views/api/__init__.py +++ b/home/views/api/__init__.py @@ -1 +1 @@ -# home.views.api +# home.views.api diff --git a/home/views/api/admin.py b/home/views/api/admin.py index 6c33eebc..cdf2c899 100644 --- a/home/views/api/admin.py +++ b/home/views/api/admin.py @@ -1,669 +1,658 @@ -import itertools -import logging -import os - -from datetime import timedelta -from dateutil import parser - -from django.db import connection -from django.db.models import ( - CharField, - Count, - Q, - Sum, - Value, -) -from django.db.models.functions import Concat, TruncDate -from django.http import HttpRequest, HttpResponse, JsonResponse -from django.views import View - -from home.models import Account, Contest, DailyWalk -from home.models.intentionalwalk import IntentionalWalk -from home.models.leaderboard import Leaderboard -from home.views.api.histogram.serializers import ( - HistogramReqSerializer, - ValidatedHistogramReq, -) -from home.views.api.serializers.request_serializers import ( - GetUsersReqSerializer, -) -from home.views.api.serializers.response_serializers import ( - GetUsersRespSerializer, -) - -from .utils import paginate, require_authn - -logger = logging.getLogger(__name__) - - -class AdminMeView(View): - http_method_names = ["get"] - - def get(self, request, *args, **kwargs): - if request.user.is_authenticated: - return JsonResponse( - { - "id": request.user.id, - "username": request.user.username, - "first_name": request.user.first_name, - "last_name": request.user.last_name, - "email": request.user.email, - } - ) - else: - return HttpResponse(status=204) - - -class AdminHomeView(View): - http_method_names = ["get"] - - def get(self, request, *args, **kwargs): - filters = {"is_tester": False} - if request.user.is_authenticated: - results = Account.objects.filter(**filters).aggregate( - Sum("dailywalk__steps"), - Sum("dailywalk__distance"), - ) - payload = { - "accounts_count": Account.objects.filter(**filters).count(), - "accounts_steps": results["dailywalk__steps__sum"], - "accounts_distance": results["dailywalk__distance__sum"], - } - return JsonResponse(payload) - else: - return HttpResponse(status=204) - - -class AdminHomeGraphView(View): - http_method_names = ["get"] - - def is_cumulative(self): - return False - - def get_results(self): - return [] - - def get(self, request, *args, **kwargs): - if not request.user.is_authenticated: - return HttpResponse(status=401) - - # handle common parameters for all the chart data API endpoints - contest_id = request.GET.get("contest_id", None) - if contest_id: - contest = Contest.objects.get(pk=contest_id) - self.start_date = min( - contest.start_baseline, contest.start_promo - ).isoformat() - self.end_date = contest.end.isoformat() - else: - self.start_date = request.GET.get("start_date", None) - self.end_date = request.GET.get("end_date", None) - self.is_tester = request.GET.get("is_tester", None) == "true" - - # let the concrete subclass implement the actual query - results = self.get_results() - - # handle common result processing for the chart data - if len(results) > 0: - if ( - self.start_date - and results[0][0] != f"{self.start_date}T00:00:00" - ): - results.insert(0, [f"{self.start_date}T00:00:00", 0]) - if self.end_date and results[-1][0] != f"{self.end_date}T00:00:00": - if self.is_cumulative(): - 
results.append( - [f"{self.end_date}T00:00:00", results[-1][1]] - ) - else: - results.append([f"{self.end_date}T00:00:00", 0]) - else: - results.append([self.start_date, 0]) - results.append([self.end_date, 0]) - results.insert(0, ["Date", "Count"]) - return JsonResponse(results, safe=False) - - -class AdminHomeUsersDailyView(AdminHomeGraphView): - def get_results(self): - filters = Q() - # filter to show users vs testers - filters = filters & Q(is_tester=self.is_tester) - # filter by date - if self.start_date: - filters = filters & Q(created__gte=self.start_date) - if self.end_date: - filters = filters & Q( - created__lt=parser.parse(self.end_date) + timedelta(days=1) - ) - results = ( - Account.objects.filter(filters) - .annotate( - date=Concat( - TruncDate("created"), - Value("T00:00:00"), - output_field=CharField(), - ) - ) - .values("date") - .annotate(count=Count("id")) - .order_by("date") - ) - results = [[row["date"], row["count"]] for row in results] - return results - - -class AdminHomeUsersCumulativeView(AdminHomeGraphView): - def is_cumulative(self): - return True - - def get_results(self): - conditions = """ - "is_tester"=%s - """ - params = [self.is_tester] - if self.start_date: - conditions = f"""{conditions} AND - "created" >= %s - """ - params.append(self.start_date) - if self.end_date: - conditions = f"""{conditions} AND - "created" < %s - """ - params.append(parser.parse(self.end_date) + timedelta(days=1)) - - with connection.cursor() as cursor: - cursor.execute( - f""" - SELECT "date", (SUM("count") OVER (ORDER BY "date"))::int AS "count" - FROM - (SELECT - CONCAT(("created" AT TIME ZONE '{os.getenv("TIME_ZONE", "America/Los_Angeles")}')::date, - 'T00:00:00') AS "date", - COUNT("id") AS "count" - FROM "home_account" - WHERE {conditions} - GROUP BY "date") subquery - ORDER BY "date" - """, - params, - ) - results = cursor.fetchall() - return list(results) - - -class AdminHomeWalksDailyView(AdminHomeGraphView): - def get_value_type(self): - return None - - def get_results(self): - filters = Q() - # filter to show users vs testers - filters = filters & Q(account__is_tester=self.is_tester) - # filter by date - if self.start_date: - filters = filters & Q(date__gte=self.start_date) - if self.end_date: - filters = filters & Q(date__lte=self.end_date) - results = ( - DailyWalk.objects.filter(filters) - .annotate( - date_time=Concat( - "date", - Value("T00:00:00"), - output_field=CharField(), - ), - ) - .values("date_time") - .annotate( - count=Sum(self.get_value_type()), - ) - .order_by("date_time") - ) - results = [[row["date_time"], row["count"]] for row in results] - return results - - -class AdminHomeStepsDailyView(AdminHomeWalksDailyView): - def get_value_type(self): - return "steps" - - -class AdminHomeDistanceDailyView(AdminHomeWalksDailyView): - def get_value_type(self): - return "distance" - - -class AdminHomeWalksCumulativeView(AdminHomeGraphView): - def is_cumulative(self): - return True - - def get_value_type(self): - return None - - def get_results(self): - conditions = """ - "home_account"."is_tester"=%s - """ - params = [self.is_tester] - if self.start_date: - conditions = f"""{conditions} AND - "home_dailywalk"."date" >= %s - """ - params.append(self.start_date) - if self.end_date: - conditions = f"""{conditions} AND - "home_dailywalk"."date" <= %s - """ - params.append(self.end_date) - - with connection.cursor() as cursor: - cursor.execute( - f""" - SELECT "date", (SUM("count") OVER (ORDER BY "date"))::int AS "count" - FROM - (SELECT - CONCAT("date", 
'T00:00:00') AS "date", - SUM("{self.get_value_type()}") AS "count" - FROM "home_dailywalk" - JOIN "home_account" ON "home_account"."id"="home_dailywalk"."account_id" - WHERE {conditions} - GROUP BY "date") subquery - ORDER BY "date" - """, - params, - ) - results = cursor.fetchall() - results = list(results) - return results - - -class AdminHomeStepsCumulativeView(AdminHomeWalksCumulativeView): - def get_value_type(self): - return "steps" - - -class AdminHomeDistanceCumulativeView(AdminHomeWalksCumulativeView): - def get_value_type(self): - return "distance" - - -class AdminContestsView(View): - http_method_names = ["get"] - - def get(self, request, *args, **kwargs): - if request.user.is_authenticated: - values = ["contest_id", "start", "end"] - order_by = ["-start"] - results = Contest.objects.values(*values).order_by(*order_by) - return JsonResponse(list(results), safe=False) - else: - return HttpResponse(status=401) - - -class AdminUsersView(View): - http_method_names = ["get"] - - @require_authn - def get(self, request, *args, **kwargs): - serializer = GetUsersReqSerializer(data=request.GET) - if not serializer.is_valid(): - return JsonResponse(serializer.errors, status=422) - - validated = serializer.validated_data - - contest_id = validated["contest_id"] - filters = validated["filters"] - order_by = validated["order_by"] - page = validated["page"] - per_page = validated["per_page"] - - annotate = validated["annotate"] - intentionalwalk_annotate = validated["intentionalwalk_annotate"] - - query = ( - Account.objects.filter(filters) - .values("id", "name", "email", "age", "zip", "created") - .annotate(**annotate) - .order_by(*order_by) - ) - query, links = paginate(request, query, page, per_page) - - iw_query = ( - Account.objects.filter(id__in=(row["id"] for row in query)) - .values("id") - .annotate(**intentionalwalk_annotate) - .order_by(*order_by) - ) - - def update_user_dto(dto, iw_stats): - dto.update(iw_stats) - # at this point, we have enough info to determine if user is "active" - if contest_id: - dto["is_active"] = dto["dw_count"] > 0 or dto["iw_count"] > 0 - return dto - - result_dto = [ - update_user_dto(dto, iw_stat) - for dto, iw_stat in zip(query, iw_query) - ] - resp = GetUsersRespSerializer(result_dto, many=True) - response = JsonResponse(resp.data, safe=False) - if links: - response.headers["Link"] = links - - return response - - -class AdminUsersByZipView(View): - http_method_names = ["get"] - - def get(self, request, *args, **kwargs): - values = ["zip"] - order_by = ["zip"] - if request.user.is_authenticated: - payload = {} - # filter and annotate based on contest_id - filters = None - annotate = { - "count": Count("zip"), - } - contest_id = request.GET.get("contest_id", None) - if contest_id: - filters = Q(contests__contest_id=contest_id) - else: - filters = Q() - - # filter to show users vs testers - filters = filters & Q( - is_tester=request.GET.get("is_tester", None) == "true" - ) - - # query for totals - results = ( - Account.objects.filter(filters) - .values(*values) - .annotate(**annotate) - .order_by(*order_by) - ) - payload["total"] = {r["zip"]: r["count"] for r in results} - - # now query for new if for contest - if contest_id: - contest = Contest.objects.get(pk=contest_id) - filters = filters & Q( - created__gte=contest.start_promo, - created__lt=contest.end + timedelta(days=1), - ) - results = ( - Account.objects.filter(filters) - .values(*values) - .annotate(**annotate) - .order_by(*order_by) - ) - payload["new"] = {r["zip"]: r["count"] for r in 
results} - - return JsonResponse(payload) - else: - return HttpResponse(status=401) - - -class AdminUsersActiveByZipView(View): - http_method_names = ["get"] - - def get(self, request, *args, **kwargs): - contest_id = request.GET.get("contest_id", None) - is_tester = request.GET.get("is_tester", None) == "true" - if not contest_id: - return HttpResponse(status=422) - elif request.user.is_authenticated: - payload = {} - contest = Contest.objects.get(pk=contest_id) - - with connection.cursor() as cursor: - cursor.execute( - """ - SELECT zip, COUNT(*) - FROM ( - SELECT DISTINCT(home_account.id), home_account.zip - FROM home_account - JOIN home_account_contests ON home_account.id=home_account_contests.account_id - LEFT JOIN home_dailywalk ON home_account.id=home_dailywalk.account_id - LEFT JOIN home_intentionalwalk ON home_account.id=home_intentionalwalk.account_id - WHERE home_account.is_tester=%s AND - home_account_contests.contest_id=%s AND - ((home_dailywalk.id IS NOT NULL AND home_dailywalk.date BETWEEN %s AND %s) OR - (home_intentionalwalk.id IS NOT NULL AND - home_intentionalwalk.start >= %s AND home_intentionalwalk.start < %s)) - ) subquery - GROUP BY zip - """, - [ - is_tester, - contest_id, - contest.start, - contest.end, - contest.start, - contest.end + timedelta(days=1), - ], - ) - rows = cursor.fetchall() - payload["total"] = {row[0]: row[1] for row in rows} - cursor.execute( - """ - SELECT zip, COUNT(*) - FROM ( - SELECT DISTINCT(home_account.id), home_account.zip - FROM home_account - JOIN home_account_contests ON home_account.id=home_account_contests.account_id - LEFT JOIN home_dailywalk ON home_account.id=home_dailywalk.account_id - LEFT JOIN home_intentionalwalk ON home_account.id=home_intentionalwalk.account_id - WHERE home_account.is_tester=%s AND - home_account_contests.contest_id=%s AND - home_account.created >= %s AND home_account.created < %s AND - ((home_dailywalk.id IS NOT NULL AND home_dailywalk.date BETWEEN %s AND %s) OR - (home_intentionalwalk.id IS NOT NULL AND - home_intentionalwalk.start >= %s AND home_intentionalwalk.start < %s)) - ) subquery - GROUP BY zip - """, - [ - is_tester, - contest_id, - contest.start_promo, - contest.end + timedelta(days=1), - contest.start, - contest.end, - contest.start, - contest.end + timedelta(days=1), - ], - ) - rows = cursor.fetchall() - payload["new"] = {row[0]: row[1] for row in rows} - - return JsonResponse(payload) - else: - return HttpResponse(status=401) - - -class AdminUsersByZipMedianStepsView(View): - http_method_names = ["get"] - - def get(self, request, *args, **kwargs): - if request.user.is_authenticated: - is_tester = request.GET.get("is_tester", None) == "true" - contest_id = request.GET.get("contest_id", None) - if contest_id is None: - return HttpResponse(status=422) - contest = Contest.objects.get(pk=contest_id) - payload = {} - with connection.cursor() as cursor: - cursor.execute( - """ - SELECT PERCENTILE_CONT(0.5) WITHIN GROUP(ORDER BY sum) - FROM ( - SELECT home_account.id AS id, SUM(home_dailywalk.steps) AS sum - FROM home_account - JOIN home_dailywalk ON home_account.id=home_dailywalk.account_id - JOIN home_account_contests ON home_account.id=home_account_contests.account_id - WHERE home_account.is_tester=%s AND - home_account_contests.contest_id=%s AND - home_dailywalk.date BETWEEN %s AND %s - GROUP BY (home_account.id) - ) subquery - """, - [is_tester, contest_id, contest.start, contest.end], - ) - row = cursor.fetchone() - - payload["all"] = row[0] - cursor.execute( - """ - SELECT zip, 
PERCENTILE_CONT(0.5) WITHIN GROUP(ORDER BY sum) - FROM ( - SELECT home_account.id AS id, home_account.zip AS zip, SUM(home_dailywalk.steps) AS sum - FROM home_account - JOIN home_dailywalk ON home_account.id=home_dailywalk.account_id - JOIN home_account_contests ON home_account.id=home_account_contests.account_id - WHERE home_account.is_tester=%s AND - home_account_contests.contest_id=%s AND - home_dailywalk.date BETWEEN %s AND %s - GROUP BY (home_account.id, home_account.zip) - ) subquery - GROUP BY zip - """, - [is_tester, contest_id, contest.start, contest.end], - ) - rows = cursor.fetchall() - for row in rows: - payload[row[0]] = row[1] - - response = JsonResponse(payload) - return response - else: - return HttpResponse(status=401) - - -class AdminHistogramView(View): - http_method_names = ["get"] - - supported_models = { - "users": Account, - "dailywalk": DailyWalk, - "intentionalwalk": IntentionalWalk, - "leaderboard": Leaderboard, - } - - @require_authn - def get(self, request: HttpRequest, model_name: str) -> HttpResponse: - """Get histogram data for a given model across a numberic field. - - Binning is done by the bin_size parameter, bin_count parameter, - or bin_custom parameter. - - bin_size: The size of each bin. - bin_count: The total number of bins. - bin_custom: A comma separated list of bin edges. - """ - if model_name not in self.supported_models: - return HttpResponse(status=404) - - serializer = HistogramReqSerializer( - data=request.GET, model=self.supported_models[model_name] - ) - if not serializer.is_valid(): - return JsonResponse(serializer.errors, status=422) - - # TODO: Implement a ResponseSerializer - # to handle serialization of the response - # (with filling of the bins) - # to either JSON or CSV format. - res: ValidatedHistogramReq = serializer.validated_data - - # even bins either specified by bin_size or bin_size computed from bin_count - if res.get("bin_size"): - return JsonResponse( - { - "data": list( - self.fill_missing_bin_idx( - query_set=res["query_set"], - bin_size=res["bin_size"], - total_bin_ct=res["bin_count"], - ) - ), - "unit": res["unit"], - "bin_size": res["bin_size"], - } - ) - # custom bins - return JsonResponse( - { - "data": list( - self.fill_missing_bin_idx( - query_set=res["query_set"], - bin_custom=res["bin_custom"], - total_bin_ct=res["bin_count"], - ) - ), - "unit": res["unit"], - "bin_custom": res["bin_custom"], - } - ) - - def fill_missing_bin_idx( - self, - query_set, - bin_size: int = None, - bin_custom: list = None, - total_bin_ct: int = 0, - ): - """Fill in missing bin intervals lazily. - - This is because the histogram is generated from a query set that may not have - found data in certain bins. - - For example, if the bins were [0, 18, 20, 33, 50, 70], - which creates bins from 0-17, 18-20, 20-33, 33-50, 50-70. - - There may be no users in in the 18-20 range, and no users in the 51 and 70. - In other words, missing on bin_idx = 1 and bin_idx = 4. - The query would not return any groupings for the [18, 20], or [50, 70] - This function will fill in those missing bins with a count of 0. - """ - - def create_filler(cursor, bin_size, bin_custom): - res = {} - # bin_start and bin_end are inclusive. - if bin_custom: - res["bin_start"] = bin_custom[cursor] - if cursor + 1 < len(bin_custom): - res["bin_end"] = bin_custom[cursor + 1] - else: - res["bin_start"] = cursor * bin_size - res["bin_end"] = (cursor + 1) * bin_size - # Done down here to maintain stable order of keys. 
- res["count"] = 0 - res["bin_idx"] = cursor - return res - - bin_idx_counter = itertools.count() - cursor = 0 - for bin in query_set: - cursor = next(bin_idx_counter) - curr_idx = bin["bin_idx"] - while curr_idx > cursor: - yield create_filler( - cursor=cursor, bin_size=bin_size, bin_custom=bin_custom - ) - cursor = next(bin_idx_counter) - yield bin - - cursor = next(bin_idx_counter) - # Fill in the rest of the bins with 0 count, - # until we reach the total expected count of bins. - while cursor and cursor < total_bin_ct: - yield create_filler( - cursor=cursor, bin_size=bin_size, bin_custom=bin_custom - ) - cursor = next(bin_idx_counter) +import itertools +import logging +import os +from datetime import timedelta + +from dateutil import parser +from django.db import connection +from django.db.models import CharField, Count, Q, Sum, Value +from django.db.models.functions import Concat, TruncDate +from django.http import HttpRequest, HttpResponse, JsonResponse +from django.views import View + +from home.models import Account, Contest, DailyWalk +from home.models.intentionalwalk import IntentionalWalk +from home.models.leaderboard import Leaderboard +from home.views.api.histogram.serializers import (HistogramReqSerializer, + ValidatedHistogramReq) +from home.views.api.serializers.request_serializers import \ + GetUsersReqSerializer +from home.views.api.serializers.response_serializers import \ + GetUsersRespSerializer + +from .utils import paginate, require_authn + +logger = logging.getLogger(__name__) + + +class AdminMeView(View): + http_method_names = ["get"] + + def get(self, request, *args, **kwargs): + if request.user.is_authenticated: + return JsonResponse( + { + "id": request.user.id, + "username": request.user.username, + "first_name": request.user.first_name, + "last_name": request.user.last_name, + "email": request.user.email, + } + ) + else: + return HttpResponse(status=204) + + +class AdminHomeView(View): + http_method_names = ["get"] + + def get(self, request, *args, **kwargs): + filters = {"is_tester": False} + if request.user.is_authenticated: + results = Account.objects.filter(**filters).aggregate( + Sum("dailywalk__steps"), + Sum("dailywalk__distance"), + ) + payload = { + "accounts_count": Account.objects.filter(**filters).count(), + "accounts_steps": results["dailywalk__steps__sum"], + "accounts_distance": results["dailywalk__distance__sum"], + } + return JsonResponse(payload) + else: + return HttpResponse(status=204) + + +class AdminHomeGraphView(View): + http_method_names = ["get"] + + def is_cumulative(self): + return False + + def get_results(self): + return [] + + def get(self, request, *args, **kwargs): + # if not request.user.is_authenticated: + # return HttpResponse(status=401) + + # handle common parameters for all the chart data API endpoints + contest_id = request.GET.get("contest_id", None) + if contest_id: + contest = Contest.objects.get(pk=contest_id) + self.start_date = min( + contest.start_baseline, contest.start_promo + ).isoformat() + self.end_date = contest.end.isoformat() + else: + self.start_date = request.GET.get("start_date", None) + self.end_date = request.GET.get("end_date", None) + self.is_tester = request.GET.get("is_tester", None) == "true" + + # let the concrete subclass implement the actual query + results = self.get_results() + + # handle common result processing for the chart data + if len(results) > 0: + if ( + self.start_date + and results[0][0] != f"{self.start_date}T00:00:00" + ): + results.insert(0, [f"{self.start_date}T00:00:00", 0]) 
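+        # Pad the end of the series as well: carry the last running total
+        # forward for cumulative charts, otherwise close the series with a zero.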
+ if self.end_date and results[-1][0] != f"{self.end_date}T00:00:00": + if self.is_cumulative(): + results.append( + [f"{self.end_date}T00:00:00", results[-1][1]] + ) + else: + results.append([f"{self.end_date}T00:00:00", 0]) + else: + results.append([self.start_date, 0]) + results.append([self.end_date, 0]) + results.insert(0, ["Date", "Count"]) + return JsonResponse(results, safe=False) + + +class AdminHomeUsersDailyView(AdminHomeGraphView): + def get_results(self): + filters = Q() + # filter to show users vs testers + filters = filters & Q(is_tester=self.is_tester) + # filter by date + if self.start_date: + filters = filters & Q(created__gte=self.start_date) + if self.end_date: + filters = filters & Q( + created__lt=parser.parse(self.end_date) + timedelta(days=1) + ) + results = ( + Account.objects.filter(filters) + .annotate( + date=Concat( + TruncDate("created"), + Value("T00:00:00"), + output_field=CharField(), + ) + ) + .values("date") + .annotate(count=Count("id")) + .order_by("date") + ) + results = [[row["date"], row["count"]] for row in results] + return results + + +class AdminHomeUsersCumulativeView(AdminHomeGraphView): + def is_cumulative(self): + return True + + def get_results(self): + conditions = """ + "is_tester"=%s + """ + params = [self.is_tester] + if self.start_date: + conditions = f"""{conditions} AND + "created" >= %s + """ + params.append(self.start_date) + if self.end_date: + conditions = f"""{conditions} AND + "created" < %s + """ + params.append(parser.parse(self.end_date) + timedelta(days=1)) + + with connection.cursor() as cursor: + cursor.execute( + f""" + SELECT "date", (SUM("count") OVER (ORDER BY "date"))::int AS "count" + FROM + (SELECT + CONCAT(("created" AT TIME ZONE '{os.getenv("TIME_ZONE", "America/Los_Angeles")}')::date, + 'T00:00:00') AS "date", + COUNT("id") AS "count" + FROM "home_account" + WHERE {conditions} + GROUP BY "date") subquery + ORDER BY "date" + """, + params, + ) + results = cursor.fetchall() + return list(results) + + +class AdminHomeWalksDailyView(AdminHomeGraphView): + def get_value_type(self): + return None + + def get_results(self): + filters = Q() + # filter to show users vs testers + filters = filters & Q(account__is_tester=self.is_tester) + # filter by date + if self.start_date: + filters = filters & Q(date__gte=self.start_date) + if self.end_date: + filters = filters & Q(date__lte=self.end_date) + results = ( + DailyWalk.objects.filter(filters) + .annotate( + date_time=Concat( + "date", + Value("T00:00:00"), + output_field=CharField(), + ), + ) + .values("date_time") + .annotate( + count=Sum(self.get_value_type()), + ) + .order_by("date_time") + ) + results = [[row["date_time"], row["count"]] for row in results] + return results + + +class AdminHomeStepsDailyView(AdminHomeWalksDailyView): + def get_value_type(self): + return "steps" + + +class AdminHomeDistanceDailyView(AdminHomeWalksDailyView): + def get_value_type(self): + return "distance" + + +class AdminHomeWalksCumulativeView(AdminHomeGraphView): + def is_cumulative(self): + return True + + def get_value_type(self): + return None + + def get_results(self): + conditions = """ + "home_account"."is_tester"=%s + """ + params = [self.is_tester] + if self.start_date: + conditions = f"""{conditions} AND + "home_dailywalk"."date" >= %s + """ + params.append(self.start_date) + if self.end_date: + conditions = f"""{conditions} AND + "home_dailywalk"."date" <= %s + """ + params.append(self.end_date) + + with connection.cursor() as cursor: + cursor.execute( + f""" + SELECT "date", 
(SUM("count") OVER (ORDER BY "date"))::int AS "count" + FROM + (SELECT + CONCAT("date", 'T00:00:00') AS "date", + SUM("{self.get_value_type()}") AS "count" + FROM "home_dailywalk" + JOIN "home_account" ON "home_account"."id"="home_dailywalk"."account_id" + WHERE {conditions} + GROUP BY "date") subquery + ORDER BY "date" + """, + params, + ) + results = cursor.fetchall() + results = list(results) + return results + + +class AdminHomeStepsCumulativeView(AdminHomeWalksCumulativeView): + def get_value_type(self): + return "steps" + + +class AdminHomeDistanceCumulativeView(AdminHomeWalksCumulativeView): + def get_value_type(self): + return "distance" + + +class AdminContestsView(View): + http_method_names = ["get"] + + def get(self, request, *args, **kwargs): + # if request.user.is_authenticated: + values = ["contest_id", "start", "end"] + order_by = ["-start"] + results = Contest.objects.values(*values).order_by(*order_by) + return JsonResponse(list(results), safe=False) + # else: + # return HttpResponse(status=401) + + +class AdminUsersView(View): + http_method_names = ["get"] + + # @require_authn + def get(self, request, *args, **kwargs): + serializer = GetUsersReqSerializer(data=request.GET) + if not serializer.is_valid(): + return JsonResponse(serializer.errors, status=422) + + validated = serializer.validated_data + + contest_id = validated["contest_id"] + filters = validated["filters"] + order_by = validated["order_by"] + page = validated["page"] + per_page = validated["per_page"] + + annotate = validated["annotate"] + intentionalwalk_annotate = validated["intentionalwalk_annotate"] + + query = ( + Account.objects.filter(filters) + .values("id", "name", "email", "age", "zip", "created") + .annotate(**annotate) + .order_by(*order_by) + ) + query, links = paginate(request, query, page, per_page) + + iw_query = ( + Account.objects.filter(id__in=(row["id"] for row in query)) + .values("id") + .annotate(**intentionalwalk_annotate) + .order_by(*order_by) + ) + + def update_user_dto(dto, iw_stats): + dto.update(iw_stats) + # at this point, we have enough info to determine if user is "active" + if contest_id: + dto["is_active"] = dto["dw_count"] > 0 or dto["iw_count"] > 0 + return dto + + result_dto = [ + update_user_dto(dto, iw_stat) + for dto, iw_stat in zip(query, iw_query) + ] + resp = GetUsersRespSerializer(result_dto, many=True) + response = JsonResponse(resp.data, safe=False) + if links: + response.headers["Link"] = links + + return response + + +class AdminUsersByZipView(View): + http_method_names = ["get"] + + def get(self, request, *args, **kwargs): + values = ["zip"] + order_by = ["zip"] + if request.user.is_authenticated: + payload = {} + # filter and annotate based on contest_id + filters = None + annotate = { + "count": Count("zip"), + } + contest_id = request.GET.get("contest_id", None) + if contest_id: + filters = Q(contests__contest_id=contest_id) + else: + filters = Q() + + # filter to show users vs testers + filters = filters & Q( + is_tester=request.GET.get("is_tester", None) == "true" + ) + + # query for totals + results = ( + Account.objects.filter(filters) + .values(*values) + .annotate(**annotate) + .order_by(*order_by) + ) + payload["total"] = {r["zip"]: r["count"] for r in results} + + # now query for new if for contest + if contest_id: + contest = Contest.objects.get(pk=contest_id) + filters = filters & Q( + created__gte=contest.start_promo, + created__lt=contest.end + timedelta(days=1), + ) + results = ( + Account.objects.filter(filters) + .values(*values) + 
.annotate(**annotate) + .order_by(*order_by) + ) + payload["new"] = {r["zip"]: r["count"] for r in results} + + return JsonResponse(payload) + else: + return HttpResponse(status=401) + + +class AdminUsersActiveByZipView(View): + http_method_names = ["get"] + + def get(self, request, *args, **kwargs): + contest_id = request.GET.get("contest_id", None) + is_tester = request.GET.get("is_tester", None) == "true" + if not contest_id: + return HttpResponse(status=422) + elif request.user.is_authenticated: + payload = {} + contest = Contest.objects.get(pk=contest_id) + + with connection.cursor() as cursor: + cursor.execute( + """ + SELECT zip, COUNT(*) + FROM ( + SELECT DISTINCT(home_account.id), home_account.zip + FROM home_account + JOIN home_account_contests ON home_account.id=home_account_contests.account_id + LEFT JOIN home_dailywalk ON home_account.id=home_dailywalk.account_id + LEFT JOIN home_intentionalwalk ON home_account.id=home_intentionalwalk.account_id + WHERE home_account.is_tester=%s AND + home_account_contests.contest_id=%s AND + ((home_dailywalk.id IS NOT NULL AND home_dailywalk.date BETWEEN %s AND %s) OR + (home_intentionalwalk.id IS NOT NULL AND + home_intentionalwalk.start >= %s AND home_intentionalwalk.start < %s)) + ) subquery + GROUP BY zip + """, + [ + is_tester, + contest_id, + contest.start, + contest.end, + contest.start, + contest.end + timedelta(days=1), + ], + ) + rows = cursor.fetchall() + payload["total"] = {row[0]: row[1] for row in rows} + cursor.execute( + """ + SELECT zip, COUNT(*) + FROM ( + SELECT DISTINCT(home_account.id), home_account.zip + FROM home_account + JOIN home_account_contests ON home_account.id=home_account_contests.account_id + LEFT JOIN home_dailywalk ON home_account.id=home_dailywalk.account_id + LEFT JOIN home_intentionalwalk ON home_account.id=home_intentionalwalk.account_id + WHERE home_account.is_tester=%s AND + home_account_contests.contest_id=%s AND + home_account.created >= %s AND home_account.created < %s AND + ((home_dailywalk.id IS NOT NULL AND home_dailywalk.date BETWEEN %s AND %s) OR + (home_intentionalwalk.id IS NOT NULL AND + home_intentionalwalk.start >= %s AND home_intentionalwalk.start < %s)) + ) subquery + GROUP BY zip + """, + [ + is_tester, + contest_id, + contest.start_promo, + contest.end + timedelta(days=1), + contest.start, + contest.end, + contest.start, + contest.end + timedelta(days=1), + ], + ) + rows = cursor.fetchall() + payload["new"] = {row[0]: row[1] for row in rows} + + return JsonResponse(payload) + else: + return HttpResponse(status=401) + + +class AdminUsersByZipMedianStepsView(View): + http_method_names = ["get"] + + def get(self, request, *args, **kwargs): + if request.user.is_authenticated: + is_tester = request.GET.get("is_tester", None) == "true" + contest_id = request.GET.get("contest_id", None) + if contest_id is None: + return HttpResponse(status=422) + contest = Contest.objects.get(pk=contest_id) + payload = {} + with connection.cursor() as cursor: + cursor.execute( + """ + SELECT PERCENTILE_CONT(0.5) WITHIN GROUP(ORDER BY sum) + FROM ( + SELECT home_account.id AS id, SUM(home_dailywalk.steps) AS sum + FROM home_account + JOIN home_dailywalk ON home_account.id=home_dailywalk.account_id + JOIN home_account_contests ON home_account.id=home_account_contests.account_id + WHERE home_account.is_tester=%s AND + home_account_contests.contest_id=%s AND + home_dailywalk.date BETWEEN %s AND %s + GROUP BY (home_account.id) + ) subquery + """, + [is_tester, contest_id, contest.start, contest.end], + ) + row = 
cursor.fetchone()
+
+                payload["all"] = row[0]
+                cursor.execute(
+                    """
+                    SELECT zip, PERCENTILE_CONT(0.5) WITHIN GROUP(ORDER BY sum)
+                    FROM (
+                        SELECT home_account.id AS id, home_account.zip AS zip, SUM(home_dailywalk.steps) AS sum
+                        FROM home_account
+                        JOIN home_dailywalk ON home_account.id=home_dailywalk.account_id
+                        JOIN home_account_contests ON home_account.id=home_account_contests.account_id
+                        WHERE home_account.is_tester=%s AND
+                              home_account_contests.contest_id=%s AND
+                              home_dailywalk.date BETWEEN %s AND %s
+                        GROUP BY (home_account.id, home_account.zip)
+                    ) subquery
+                    GROUP BY zip
+                    """,
+                    [is_tester, contest_id, contest.start, contest.end],
+                )
+                rows = cursor.fetchall()
+                for row in rows:
+                    payload[row[0]] = row[1]
+
+            response = JsonResponse(payload)
+            return response
+        else:
+            return HttpResponse(status=401)
+
+
+class AdminHistogramView(View):
+    http_method_names = ["get"]
+
+    supported_models = {
+        "users": Account,
+        "dailywalk": DailyWalk,
+        "intentionalwalk": IntentionalWalk,
+        "leaderboard": Leaderboard,
+    }
+
+    @require_authn
+    def get(self, request: HttpRequest, model_name: str) -> HttpResponse:
+        """Get histogram data for a given model across a numeric field.
+
+        Binning is done by the bin_size parameter, bin_count parameter,
+        or bin_custom parameter.
+
+        bin_size: The size of each bin.
+        bin_count: The total number of bins.
+        bin_custom: A comma-separated list of bin edges.
+        """
+        if model_name not in self.supported_models:
+            return HttpResponse(status=404)
+
+        serializer = HistogramReqSerializer(
+            data=request.GET, model=self.supported_models[model_name]
+        )
+        if not serializer.is_valid():
+            return JsonResponse(serializer.errors, status=422)
+
+        # TODO: Implement a ResponseSerializer
+        # to handle serialization of the response
+        # (with filling of the bins)
+        # to either JSON or CSV format.
+        res: ValidatedHistogramReq = serializer.validated_data
+
+        # evenly sized bins, either specified by bin_size or computed from bin_count
+        if res.get("bin_size"):
+            return JsonResponse(
+                {
+                    "data": list(
+                        self.fill_missing_bin_idx(
+                            query_set=res["query_set"],
+                            bin_size=res["bin_size"],
+                            total_bin_ct=res["bin_count"],
+                        )
+                    ),
+                    "unit": res["unit"],
+                    "bin_size": res["bin_size"],
+                }
+            )
+        # custom bins
+        return JsonResponse(
+            {
+                "data": list(
+                    self.fill_missing_bin_idx(
+                        query_set=res["query_set"],
+                        bin_custom=res["bin_custom"],
+                        total_bin_ct=res["bin_count"],
+                    )
+                ),
+                "unit": res["unit"],
+                "bin_custom": res["bin_custom"],
+            }
+        )
+
+    def fill_missing_bin_idx(
+        self,
+        query_set,
+        bin_size: int = None,
+        bin_custom: list = None,
+        total_bin_ct: int = 0,
+    ):
+        """Fill in missing bin intervals lazily.
+
+        This is needed because the histogram is generated from a query set
+        that may not have found data in certain bins.
+
+        For example, bin edges of [0, 18, 20, 33, 50, 70] create bins covering
+        0-17, 18-20, 20-33, 33-50, and 50-70.
+
+        There may be no users in the 18-20 range, and no users between 50 and 70.
+        In other words, bin_idx = 1 and bin_idx = 4 are missing.
+        The query would not return any groupings for [18, 20] or [50, 70].
+        This function will fill in those missing bins with a count of 0.
+        """
+
+        def create_filler(cursor, bin_size, bin_custom):
+            res = {}
+            # bin_start and bin_end are inclusive.
+ if bin_custom: + res["bin_start"] = bin_custom[cursor] + if cursor + 1 < len(bin_custom): + res["bin_end"] = bin_custom[cursor + 1] + else: + res["bin_start"] = cursor * bin_size + res["bin_end"] = (cursor + 1) * bin_size + # Done down here to maintain stable order of keys. + res["count"] = 0 + res["bin_idx"] = cursor + return res + + bin_idx_counter = itertools.count() + cursor = 0 + for bin in query_set: + cursor = next(bin_idx_counter) + curr_idx = bin["bin_idx"] + while curr_idx > cursor: + yield create_filler( + cursor=cursor, bin_size=bin_size, bin_custom=bin_custom + ) + cursor = next(bin_idx_counter) + yield bin + + cursor = next(bin_idx_counter) + # Fill in the rest of the bins with 0 count, + # until we reach the total expected count of bins. + while cursor and cursor < total_bin_ct: + yield create_filler( + cursor=cursor, bin_size=bin_size, bin_custom=bin_custom + ) + cursor = next(bin_idx_counter) diff --git a/home/views/api/api.py b/home/views/api/api.py new file mode 100644 index 00000000..e69de29b diff --git a/home/views/api/appuser.py b/home/views/api/appuser.py index 127b7aa8..7d5d7d14 100644 --- a/home/views/api/appuser.py +++ b/home/views/api/appuser.py @@ -1,332 +1,332 @@ -import json - -from django.core.exceptions import ObjectDoesNotExist -from django.http import JsonResponse -from django.utils.decorators import method_decorator -from django.views import View -from django.views.decorators.csrf import csrf_exempt - -from home.models import Account, Device -from home.models.account import ( - SAN_FRANCISCO_ZIP_CODES, - GenderLabels, - IsLatinoLabels, - RaceLabels, - SexualOrientationLabels, -) - -from .utils import validate_request_json - - -# Determines whether Account is tester account, based on name prefix -def is_tester(name_field: str) -> bool: - possible_prefixes = ["tester-", "tester ", "tester_"] - return any( - [name_field.lower().startswith(prefix) for prefix in possible_prefixes] - ) - - -# Validates Account input data. Raises AssertionError if field is invalid. 
-# Does not check for required fields since that is done by -# validate_request_json -def validate_account_input(data: dict): - """ - Validation of account data input: - * name: not empty - * zip: length between 5 and 10 - * age: between 1 and 200 - * is_latino: value must be in IsLatinoLabels - * race: values must be in RaceLabels - * race_other: must be specified when race includes 'OT' - * gender: value must be in GenderLabels - * gender_other: must be specified when gender is 'OT' - * sexual_orien: value must be in SexualOrientationLabels - * sexual_orien_other: must be specified when sexual_orien is 'OT' - - """ - if ( - data.get("name") is not None - ): # Required field but existence checked in validate_request_json - assert len(data["name"]) > 0, "Invalid name" - if ( - data.get("zip") is not None - ): # Required field but existence checked in validate_request_json - assert ( - len(data["zip"]) >= 5 and len(data["zip"]) <= 10 - ), "Invalid zip code" - if ( - data.get("age") is not None - ): # Required field but existence checked in validate_request_json - assert data["age"] > 1 and data["age"] < 200, "Invalid age" - if data.get("is_latino") is not None: - is_latino = data["is_latino"] - assert ( - is_latino in IsLatinoLabels.__members__ - ), f"Invalid is latino or hispanic selection '{is_latino}'" - if data.get("race") is not None: - for item in data["race"]: - assert ( - item in RaceLabels.__members__ - ), f"Invalid race selection '{item}'" - if "OT" in data["race"]: - assert ( - len(data.get("race_other", "")) > 0 - ), "Must specify 'other' race" - else: - assert ( - data.get("race_other") is None - ), "'race_other' should not be specified without race 'OT'" - elif data.get("race_other") is not None: - assert False, "'race_other' should not be specified without 'race'" - if data.get("gender") is not None: - gender = data["gender"] - assert ( - gender in GenderLabels.__members__ - ), f"Invalid gender selection '{gender}'" - if data["gender"] == "OT": - assert ( - len(data.get("gender_other", "")) > 0 - ), "Must specify 'other' gender" - else: - assert ( - data.get("gender_other") is None - ), "'gender_other' should not be specified without 'OT'" - elif data.get("gender_other") is not None: - assert False, "'gender_other' should not be specified without 'gender'" - if data.get("sexual_orien") is not None: - sexual_orientation = data["sexual_orien"] - assert ( - sexual_orientation in SexualOrientationLabels.__members__ - ), f"Invalid sexual orientation selection '{sexual_orientation}'" - if data["sexual_orien"] == "OT": - assert ( - len(data.get("sexual_orien_other", "")) > 0 - ), "Must specify 'other' sexual orientation" - else: - assert ( - data.get("sexual_orien_other") is None - ), "'sexual_orien_other' should not be specified without 'OT'" - elif data.get("sexual_orien_other") is not None: - assert ( - False - ), "'sexual_orien_other' should not be specified without 'gender'" - - -def update_account(acct: Account, data: dict): - # Data fields vary based on registration screen - - # Screen 1: Name, Email, Zip, Age. - # Not possible to update email - if data.get("name") is not None: - acct.name = data["name"] - acct.is_tester = is_tester(data["name"]) - - if data.get("zip") is not None: - acct.zip = data["zip"] - acct.is_sf_resident = data["zip"] in SAN_FRANCISCO_ZIP_CODES - - if data.get("age") is not None: - acct.age = data["age"] - - # Screen 2. Latino/Hispanic Origin - if data.get("is_latino") is not None: - acct.is_latino = data.get("is_latino") - - # Screen 3. 
Race - if data.get("race") is not None: - acct.race = data.get("race", []) - acct.race_other = data.get("race_other") - - # Screen 4. Gender Identity - if data.get("gender") is not None: - acct.gender = data.get("gender") - acct.gender_other = data.get("gender_other") - - # Screen 5. Sexual Orientation - if data.get("sexual_orien") is not None: - acct.sexual_orien = data.get("sexual_orien") - acct.sexual_orien_other = data.get("sexual_orien_other") - acct.save() - - -# Exempt from csrf validation -@method_decorator(csrf_exempt, name="dispatch") -class AppUserCreateView(View): - """API interface to register a device and a user account on app install. - If present, the same endpoint will update user details except email. - """ - - http_method_names = ["post", "put"] - - def put(self, request, *args, **kwargs): - json_data = json.loads(request.body) - - # Validate json. If account_id is missing, send back the response - json_status = validate_request_json( - json_data, required_fields=["account_id"] - ) - if "status" in json_status and json_status["status"] == "error": - return JsonResponse(json_status) - - # Update user attributes. - device = Device.objects.get(device_id=json_data["account_id"]) - account = Account.objects.get(email__iexact=device.account.email) - update_account(account, json_data) - message = "Account updated successfully" - - return JsonResponse( - { - "status": "success", - "message": message, - } - ) - - def post(self, request, *args, **kwargs): - # Parse the body json - json_data = json.loads(request.body) - - # Validate json. If any field is missing, send back the response - json_status = validate_request_json( - json_data, - required_fields=["name", "email", "zip", "age", "account_id"], - ) - if "status" in json_status and json_status["status"] == "error": - return JsonResponse(json_status) - - # Update user attributes if the object exists, else create. - # EMAIL CANNOT BE UPDATED! - # NOTE: Ideally, email ids should be validated by actually sending - # emails. Currently, accidental/intentional use of the same email id - # will share data across devices. In such an instance, a new account - # with the correct email must be created to separate data. Otherwise, - # attribution will be to the account email created first. - - # For a participating device - try: - # NOTE: Account id here maps to a device id. Perhaps the API - # definition could be changed in the future. - # Get the registered device if it exists - device = Device.objects.get(device_id=json_data["account_id"]) - # If it is an email update fail and return - if device.account.email.lower() != json_data["email"].lower(): - return JsonResponse( - { - "status": "error", - "message": "Email cannot be updated. Contact admin", - } - ) - - # Otherwise, update the account's other details - account = Account.objects.get(email__iexact=json_data["email"]) - update_account(account, json_data) - message = "Device & account updated successfully" - - # This implies that it is a new device - except ObjectDoesNotExist: - # Check if the user account exists. 
If not, create it - try: - account = Account.objects.get(email__iexact=json_data["email"]) - update_account(account, json_data) - message = "Account updated successfully" - account_updated = True - except ObjectDoesNotExist: - # Partially create account first, with required fields - account = Account.objects.create( - email=json_data["email"], - name=json_data["name"], - zip=json_data["zip"], - age=json_data["age"], - is_tester=is_tester(json_data["name"]), - is_sf_resident=json_data["zip"] in SAN_FRANCISCO_ZIP_CODES, - ) - account_updated = False - - # Create a new device object and link it to the account - device = Device.objects.create( - device_id=json_data["account_id"], account=account - ) - updated_str = "updated" if account_updated else "registered" - message = f"Device registered & account {updated_str} successfully" - - # To validate, retrieve the object back and send the details - try: - device = Device.objects.get(device_id=json_data["account_id"]) - except ObjectDoesNotExist: - return JsonResponse( - { - "status": "error", - "message": ( - "App User failed to save successfully. Please retry" - ), - } - ) - - return JsonResponse( - { - "status": "success", - "message": message, - "payload": { - "account_id": device.device_id, - "name": account.name, - "email": account.email, - "zip": account.zip, - "age": account.age, - "is_latino": account.is_latino, - "race": list(account.race), - "race_other": account.race_other, - "gender": account.gender, - "gender_other": account.gender_other, - "sexual_orien": account.sexual_orien, - "sexual_orien_other": account.sexual_orien_other, - }, - } - ) - - def http_method_not_allowed(self, request): - return JsonResponse( - {"status": "error", "message": "Method not allowed!"} - ) - - -# Exempt from csrf validation -@method_decorator(csrf_exempt, name="dispatch") -class AppUserDeleteView(View): - """API interface to delete a user account""" - - http_method_names = ["delete"] - - def delete(self, request, *args, **kwargs): - json_data = json.loads(request.body) - - # Validate json. 
If account_id is missing, send back the response - json_status = validate_request_json( - json_data, required_fields=["account_id"] - ) - if "status" in json_status and json_status["status"] == "error": - return JsonResponse(json_status) - - # Look for specified user/device account - try: - device = Device.objects.get(device_id=json_data["account_id"]) - except ObjectDoesNotExist: - return JsonResponse( - { - "status": "error", - "message": "Cannot find user with specified account id.", - } - ) - - device.account.delete() - device.delete() - - return JsonResponse( - { - "status": "success", - "message": "Account deleted successfully", - } - ) - - def http_method_not_allowed(self, request): - return JsonResponse( - {"status": "error", "message": "Method not allowed!"} - ) +import json + +from django.core.exceptions import ObjectDoesNotExist +from django.http import JsonResponse +from django.utils.decorators import method_decorator +from django.views import View +from django.views.decorators.csrf import csrf_exempt + +from home.models import Account, Device +from home.models.account import ( + SAN_FRANCISCO_ZIP_CODES, + GenderLabels, + IsLatinoLabels, + RaceLabels, + SexualOrientationLabels, +) + +from .utils import validate_request_json + + +# Determines whether Account is tester account, based on name prefix +def is_tester(name_field: str) -> bool: + possible_prefixes = ["tester-", "tester ", "tester_"] + return any( + [name_field.lower().startswith(prefix) for prefix in possible_prefixes] + ) + + +# Validates Account input data. Raises AssertionError if field is invalid. +# Does not check for required fields since that is done by +# validate_request_json +def validate_account_input(data: dict): + """ + Validation of account data input: + * name: not empty + * zip: length between 5 and 10 + * age: between 1 and 200 + * is_latino: value must be in IsLatinoLabels + * race: values must be in RaceLabels + * race_other: must be specified when race includes 'OT' + * gender: value must be in GenderLabels + * gender_other: must be specified when gender is 'OT' + * sexual_orien: value must be in SexualOrientationLabels + * sexual_orien_other: must be specified when sexual_orien is 'OT' + + """ + if ( + data.get("name") is not None + ): # Required field but existence checked in validate_request_json + assert len(data["name"]) > 0, "Invalid name" + if ( + data.get("zip") is not None + ): # Required field but existence checked in validate_request_json + assert ( + len(data["zip"]) >= 5 and len(data["zip"]) <= 10 + ), "Invalid zip code" + if ( + data.get("age") is not None + ): # Required field but existence checked in validate_request_json + assert data["age"] > 1 and data["age"] < 200, "Invalid age" + if data.get("is_latino") is not None: + is_latino = data["is_latino"] + assert ( + is_latino in IsLatinoLabels.__members__ + ), f"Invalid is latino or hispanic selection '{is_latino}'" + if data.get("race") is not None: + for item in data["race"]: + assert ( + item in RaceLabels.__members__ + ), f"Invalid race selection '{item}'" + if "OT" in data["race"]: + assert ( + len(data.get("race_other", "")) > 0 + ), "Must specify 'other' race" + else: + assert ( + data.get("race_other") is None + ), "'race_other' should not be specified without race 'OT'" + elif data.get("race_other") is not None: + assert False, "'race_other' should not be specified without 'race'" + if data.get("gender") is not None: + gender = data["gender"] + assert ( + gender in GenderLabels.__members__ + ), f"Invalid gender selection 
'{gender}'" + if data["gender"] == "OT": + assert ( + len(data.get("gender_other", "")) > 0 + ), "Must specify 'other' gender" + else: + assert ( + data.get("gender_other") is None + ), "'gender_other' should not be specified without 'OT'" + elif data.get("gender_other") is not None: + assert False, "'gender_other' should not be specified without 'gender'" + if data.get("sexual_orien") is not None: + sexual_orientation = data["sexual_orien"] + assert ( + sexual_orientation in SexualOrientationLabels.__members__ + ), f"Invalid sexual orientation selection '{sexual_orientation}'" + if data["sexual_orien"] == "OT": + assert ( + len(data.get("sexual_orien_other", "")) > 0 + ), "Must specify 'other' sexual orientation" + else: + assert ( + data.get("sexual_orien_other") is None + ), "'sexual_orien_other' should not be specified without 'OT'" + elif data.get("sexual_orien_other") is not None: + assert ( + False + ), "'sexual_orien_other' should not be specified without 'sexual_orien'" + + +def update_account(acct: Account, data: dict): + # Data fields vary based on registration screen + + # Screen 1: Name, Email, Zip, Age. + # Not possible to update email + if data.get("name") is not None: + acct.name = data["name"] + acct.is_tester = is_tester(data["name"]) + + if data.get("zip") is not None: + acct.zip = data["zip"] + acct.is_sf_resident = data["zip"] in SAN_FRANCISCO_ZIP_CODES + + if data.get("age") is not None: + acct.age = data["age"] + + # Screen 2. Latino/Hispanic Origin + if data.get("is_latino") is not None: + acct.is_latino = data.get("is_latino") + + # Screen 3. Race + if data.get("race") is not None: + acct.race = data.get("race", []) + acct.race_other = data.get("race_other") + + # Screen 4. Gender Identity + if data.get("gender") is not None: + acct.gender = data.get("gender") + acct.gender_other = data.get("gender_other") + + # Screen 5. Sexual Orientation + if data.get("sexual_orien") is not None: + acct.sexual_orien = data.get("sexual_orien") + acct.sexual_orien_other = data.get("sexual_orien_other") + acct.save() + + +# Exempt from csrf validation +@method_decorator(csrf_exempt, name="dispatch") +class AppUserCreateView(View): + """API interface to register a device and a user account on app install. + If present, the same endpoint will update user details except email. + """ + + http_method_names = ["post", "put"] + + def put(self, request, *args, **kwargs): + json_data = json.loads(request.body) + + # Validate json. If account_id is missing, send back the response + json_status = validate_request_json( + json_data, required_fields=["account_id"] + ) + if "status" in json_status and json_status["status"] == "error": + return JsonResponse(json_status) + + # Update user attributes. + device = Device.objects.get(device_id=json_data["account_id"]) + account = Account.objects.get(email__iexact=device.account.email) + update_account(account, json_data) + message = "Account updated successfully" + + return JsonResponse( + { + "status": "success", + "message": message, + } + ) + + def post(self, request, *args, **kwargs): + # Parse the body json + json_data = json.loads(request.body) + + # Validate json. If any field is missing, send back the response + json_status = validate_request_json( + json_data, + required_fields=["name", "email", "zip", "age", "account_id"], + ) + if "status" in json_status and json_status["status"] == "error": + return JsonResponse(json_status) + + # Update user attributes if the object exists, else create. + # EMAIL CANNOT BE UPDATED! 
+ # NOTE: Ideally, email ids should be validated by actually sending + # emails. Currently, accidental/intentional use of the same email id + # will share data across devices. In such an instance, a new account + # with the correct email must be created to separate data. Otherwise, + # attribution will be to the account email created first. + + # For a participating device + try: + # NOTE: Account id here maps to a device id. Perhaps the API + # definition could be changed in the future. + # Get the registered device if it exists + device = Device.objects.get(device_id=json_data["account_id"]) + # If it is an email update fail and return + if device.account.email.lower() != json_data["email"].lower(): + return JsonResponse( + { + "status": "error", + "message": "Email cannot be updated. Contact admin", + } + ) + + # Otherwise, update the account's other details + account = Account.objects.get(email__iexact=json_data["email"]) + update_account(account, json_data) + message = "Device & account updated successfully" + + # This implies that it is a new device + except ObjectDoesNotExist: + # Check if the user account exists. If not, create it + try: + account = Account.objects.get(email__iexact=json_data["email"]) + update_account(account, json_data) + message = "Account updated successfully" + account_updated = True + except ObjectDoesNotExist: + # Partially create account first, with required fields + account = Account.objects.create( + email=json_data["email"], + name=json_data["name"], + zip=json_data["zip"], + age=json_data["age"], + is_tester=is_tester(json_data["name"]), + is_sf_resident=json_data["zip"] in SAN_FRANCISCO_ZIP_CODES, + ) + account_updated = False + + # Create a new device object and link it to the account + device = Device.objects.create( + device_id=json_data["account_id"], account=account + ) + updated_str = "updated" if account_updated else "registered" + message = f"Device registered & account {updated_str} successfully" + + # To validate, retrieve the object back and send the details + try: + device = Device.objects.get(device_id=json_data["account_id"]) + except ObjectDoesNotExist: + return JsonResponse( + { + "status": "error", + "message": ( + "App User failed to save successfully. Please retry" + ), + } + ) + + return JsonResponse( + { + "status": "success", + "message": message, + "payload": { + "account_id": device.device_id, + "name": account.name, + "email": account.email, + "zip": account.zip, + "age": account.age, + "is_latino": account.is_latino, + "race": list(account.race), + "race_other": account.race_other, + "gender": account.gender, + "gender_other": account.gender_other, + "sexual_orien": account.sexual_orien, + "sexual_orien_other": account.sexual_orien_other, + }, + } + ) + + def http_method_not_allowed(self, request): + return JsonResponse( + {"status": "error", "message": "Method not allowed!"} + ) + + +# Exempt from csrf validation +@method_decorator(csrf_exempt, name="dispatch") +class AppUserDeleteView(View): + """API interface to delete a user account""" + + http_method_names = ["delete"] + + def delete(self, request, *args, **kwargs): + json_data = json.loads(request.body) + + # Validate json. 
If account_id is missing, send back the response + json_status = validate_request_json( + json_data, required_fields=["account_id"] + ) + if "status" in json_status and json_status["status"] == "error": + return JsonResponse(json_status) + + # Look for specified user/device account + try: + device = Device.objects.get(device_id=json_data["account_id"]) + except ObjectDoesNotExist: + return JsonResponse( + { + "status": "error", + "message": "Cannot find user with specified account id.", + } + ) + + device.account.delete() + device.delete() + + return JsonResponse( + { + "status": "success", + "message": "Account deleted successfully", + } + ) + + def http_method_not_allowed(self, request): + return JsonResponse( + {"status": "error", "message": "Method not allowed!"} + ) diff --git a/home/views/api/appuserv2.py b/home/views/api/appuserv2.py new file mode 100644 index 00000000..9a3f89e1 --- /dev/null +++ b/home/views/api/appuserv2.py @@ -0,0 +1,126 @@ +import json +from datetime import date +from typing import List + +from django.core.exceptions import ObjectDoesNotExist +from django.shortcuts import get_object_or_404 +from ninja import Router +from ninja.errors import HttpError + +from home.models import Account, Device +from home.models.account import ( + SAN_FRANCISCO_ZIP_CODES, + GenderLabels, + IsLatinoLabels, + RaceLabels, + SexualOrientationLabels, +) +from home.views.api.schemas.account import AccountSchema, DeviceSchema + +router = Router() + + +# Determines whether Account is tester account, based on name prefix +def is_tester(name_field: str) -> bool: + possible_prefixes = ["tester-", "tester ", "tester_"] + return any( + [name_field.lower().startswith(prefix) for prefix in possible_prefixes] + ) + + +def update_account(account: Account, json_data: dict): + # Data fields vary based on registration screen + for attr, value in json_data.items(): + if attr != "email": + setattr(account, attr, value) + account.save() + + +@router.post("/appuser", response={201: AccountSchema}) +def create_appuser(request, payload: AccountSchema): + # Parse the body json + json_data = payload.dict() + # For a participating device + try: + # NOTE: Account id here maps to a device id. Perhaps the API + # definition could be changed in the future. + # Get the registered device if it exists + device = Device.objects.get(device_id=json_data["account_id"]) + # If it is an email update fail and return + if device.account.email.lower() != json_data["email"].lower(): + raise HttpError(400, "Email cannot be updated. Contact admin") + + # Otherwise, update the account's other details + account = Account.objects.get(email__iexact=json_data["email"]) + update_account(account, json_data) + return 201, {"account_id": device.device_id, **account.__dict__} + + # This implies that it is a new device + except Device.DoesNotExist: + # Check if the user account exists. 
If not, create it + try: + account = Account.objects.get(email__iexact=json_data["email"]) + update_account(account, json_data) + message = "Account updated successfully" + account_updated = True + except Account.DoesNotExist: + # Partially create account first, with required fields + account = Account.objects.create( + email=json_data["email"], + name=json_data["name"], + zip=json_data["zip"], + age=json_data["age"], + is_tester=is_tester(json_data["name"]), + is_sf_resident=json_data["zip"] in SAN_FRANCISCO_ZIP_CODES, + ) + account_updated = False + + # Create a new device object and link it to the account + device = Device.objects.create( + device_id=json_data["account_id"], account=account + ) + + # return 201, { + # "account_id": device.device_id, + # "name": account.name, + # "email": account.email, + # "zip": account.zip, + # "age": account.age, + # "is_latino": account.is_latino, + # "race": account.race, + # "race_other": account.race_other, + # "gender": account.gender, + # "gender_other": account.gender_other, + # "sexual_orien": account.sexual_orien, + # "sexual_orien_other": account.sexual_orien_other, + # } + + return 201, {"account_id": device.device_id, **account.__dict__} + + +# @router.get("/employees/{employee_id}", response=EmployeeOut) +# def get_employee(request, employee_id: int): +# employee = get_object_or_404(Employee, id=employee_id) +# return employee + + +# @router.get("/employees", response=List[EmployeeOut]) +# def list_employees(request): +# qs = Employee.objects.all() +# return qs + + +@router.put("/appuser/{account_id}") +def update_appuser(request, account_id: int, payload: AccountSchema): + account = get_object_or_404(Account, id=account_id) + for attr, value in payload.dict().items(): + setattr(account, attr, value) + account.save() + return account + + +@router.delete("/appuser/{account_id}") +def delete_appuser(request, account_id: int): + account = get_object_or_404(Account, id=account_id) + account.delete() + return {"success": True} diff --git a/home/views/api/contest.py b/home/views/api/contest.py index 7093df2b..220bb8b5 100644 --- a/home/views/api/contest.py +++ b/home/views/api/contest.py @@ -1,37 +1,37 @@ -from django.http import JsonResponse -from django.utils.decorators import method_decorator -from django.views import View -from django.views.decorators.csrf import csrf_exempt - -from home.models import Contest - - -@method_decorator(csrf_exempt, name="dispatch") -class ContestCurrentView(View): - """View to retrieve current Contest""" - - model = Contest - http_method_names = ["get"] - - def get(self, request, *args, **kwargs): - # get the current/next Contest - contest = Contest.active() - if contest is None: - return JsonResponse( - { - "status": "error", - "message": "There are no contests", - } - ) - return JsonResponse( - { - "status": "success", - "payload": { - "contest_id": contest.contest_id, - "start_baseline": contest.start_baseline, - "start_promo": contest.start_promo, - "start": contest.start, - "end": contest.end, - }, - } - ) +from django.http import JsonResponse +from django.utils.decorators import method_decorator +from django.views import View +from django.views.decorators.csrf import csrf_exempt + +from home.models import Contest + + +@method_decorator(csrf_exempt, name="dispatch") +class ContestCurrentView(View): + """View to retrieve current Contest""" + + model = Contest + http_method_names = ["get"] + + def get(self, request, *args, **kwargs): + # get the current/next Contest + contest = Contest.active() + if contest 
is None: + return JsonResponse( + { + "status": "error", + "message": "There are no contests", + } + ) + return JsonResponse( + { + "status": "success", + "payload": { + "contest_id": contest.contest_id, + "start_baseline": contest.start_baseline, + "start_promo": contest.start_promo, + "start": contest.start, + "end": contest.end, + }, + } + ) diff --git a/home/views/api/dailywalk.py b/home/views/api/dailywalk.py index 2f366208..b86f5452 100644 --- a/home/views/api/dailywalk.py +++ b/home/views/api/dailywalk.py @@ -1,220 +1,220 @@ -import json -import logging -from datetime import date - -from django.core.exceptions import ObjectDoesNotExist -from django.http import JsonResponse -from django.utils.decorators import method_decorator -from django.views import View -from django.views.decorators.csrf import csrf_exempt - -from home.models import Contest, DailyWalk, Device - - -from .utils import validate_request_json - -logger = logging.getLogger(__name__) - - -# Exempt from csrf validation -@method_decorator(csrf_exempt, name="dispatch") -class DailyWalkCreateView(View): - """View to create or update a list of dailywalks from a registered device""" - - model = DailyWalk - http_method_names = ["post"] - - def post(self, request, *args, **kwargs): - json_data = json.loads(request.body) - - # Validate json. If any field is missing, send back the response message - json_status = validate_request_json( - json_data, - required_fields=["account_id", "daily_walks"], - ) - if "status" in json_status and json_status["status"] == "error": - return JsonResponse(json_status) - - # Get the device if already registered - try: - device = Device.objects.get(device_id=json_data["account_id"]) - except ObjectDoesNotExist: - return JsonResponse( - { - "status": "error", - "message": ( - "Unregistered device - " - f"{json_data['account_id']}." - " Please register first!" - ), - } - ) - - # Json response template - json_response = { - "status": "success", - "message": "Dailywalks recorded successfully", - "payload": { - "account_id": device.device_id, - "daily_walks": [], - }, - } - - active_contests = set() - - for daily_walk_data in json_data["daily_walks"]: - # Validate data - json_status = validate_request_json( - daily_walk_data, - required_fields=["date", "steps", "distance"], - ) - if "status" in json_status and json_status["status"] == "error": - return JsonResponse(json_status) - - walk_date = daily_walk_data["date"] - contest = Contest.active( - for_date=date.fromisoformat(walk_date), strict=True - ) - if contest is not None: - active_contests.add(contest) - - # Check if there is already an entry for this date. If there is, - # update the entry. - # NOTE: By definition, there should be one and only one entry for - # a given email and date. - # NOTE: This is a potential vulnerability. Since there is no email - # authentication at the moment, anyone can simply spoof an email - # id with a new device and overwrite daily walk data for the - # target email. 
This is also a result of no session auth - # (can easily hit the api directly) - try: - # Updation - daily_walk = DailyWalk.objects.get( - account=device.account, date=walk_date - ) - daily_walk.steps = daily_walk_data["steps"] - daily_walk.distance = daily_walk_data["distance"] - daily_walk.device_id = json_data["account_id"] - daily_walk.save() - except ObjectDoesNotExist: - # Creation if object is missing - daily_walk = DailyWalk.objects.create( - date=walk_date, - steps=daily_walk_data["steps"], - distance=daily_walk_data["distance"], - device=device, - ) - - # Update the json object - json_response["payload"]["daily_walks"].append( - { - "date": daily_walk.date, - "steps": daily_walk.steps, - "distance": daily_walk.distance, - } - ) - - # Register contest for account if the day falls between contest dates - contest = Contest.active(for_date=date.today(), strict=True) - if contest: - active_contests.add(contest) - try: - acct = device.account - acct.contests.add(contest) - except Exception: - logger.error( - "Could not associate contest " - f"{contest} with account {acct}!", - exc_info=True, - ) - else: - # No active contest - pass - - # Update Leaderboard - for contest in active_contests: - DailyWalk.update_leaderboard(device=device, contest=contest) - - return JsonResponse(json_response) - - def http_method_not_allowed(self, request): - return JsonResponse( - {"status": "error", "message": "Method not allowed!"} - ) - - -# Should pagination be added? -@method_decorator(csrf_exempt, name="dispatch") -class DailyWalkListView(View): - """View to retrieve Daily Walks""" - - model = DailyWalk - http_method_names = ["post"] - - def post(self, request, *args, **kwargs): - json_data = json.loads(request.body) - - # Validate json. If any field is missing, send back the response message - json_status = validate_request_json( - json_data, required_fields=["account_id"] - ) - if "status" in json_status and json_status["status"] == "error": - return JsonResponse(json_status) - - # Get the device if already registered - try: - device = Device.objects.get(device_id=json_data["account_id"]) - # appuser = AppUser.objects.get(account_id=json_data["account_id"]) - except ObjectDoesNotExist: - return JsonResponse( - { - "status": "error", - "message": ( - "Unregistered device - " - f"{json_data['account_id']}." - " Please register first!" - ), - } - ) - - # Get walks from tied to this account - # NOTE: This is very hacky and cannot distinguish between legit and - # fake users. - # Someone can simply install the app on a new device and use a known - # email id and have the metrics simply aggregated. 
- # For the simple use case, this is likely not an issue and would need - # to be handled manually if needed - daily_walks = DailyWalk.objects.filter(account=device.account) - - # Hacky serializer - total_steps = 0 - total_distance = 0 - daily_walk_list = [] - for daily_walk in daily_walks: - daily_walk_list.append( - { - "date": daily_walk.date, - "steps": daily_walk.steps, - "distance": daily_walk.distance, - } - ) - total_steps += daily_walk.steps - total_distance += daily_walk.distance - - # Sort the list based on the date - daily_walk_list = sorted( - daily_walk_list, key=lambda x: x["date"], reverse=True - ) - # Create the payload with meta information and send it back - payload = { - "daily_walks": daily_walk_list, - "total_steps": total_steps, - "total_distance": total_distance, - "status": "success", - } - return JsonResponse(payload) - - def http_method_not_allowed(self, request): - return JsonResponse( - {"status": "error", "message": "Method not allowed!"} - ) +import json +import logging +from datetime import date + +from django.core.exceptions import ObjectDoesNotExist +from django.http import JsonResponse +from django.utils.decorators import method_decorator +from django.views import View +from django.views.decorators.csrf import csrf_exempt + +from home.models import Contest, DailyWalk, Device + + +from .utils import validate_request_json + +logger = logging.getLogger(__name__) + + +# Exempt from csrf validation +@method_decorator(csrf_exempt, name="dispatch") +class DailyWalkCreateView(View): + """View to create or update a list of dailywalks from a registered device""" + + model = DailyWalk + http_method_names = ["post"] + + def post(self, request, *args, **kwargs): + json_data = json.loads(request.body) + + # Validate json. If any field is missing, send back the response message + json_status = validate_request_json( + json_data, + required_fields=["account_id", "daily_walks"], + ) + if "status" in json_status and json_status["status"] == "error": + return JsonResponse(json_status) + + # Get the device if already registered + try: + device = Device.objects.get(device_id=json_data["account_id"]) + except ObjectDoesNotExist: + return JsonResponse( + { + "status": "error", + "message": ( + "Unregistered device - " + f"{json_data['account_id']}." + " Please register first!" + ), + } + ) + + # Json response template + json_response = { + "status": "success", + "message": "Dailywalks recorded successfully", + "payload": { + "account_id": device.device_id, + "daily_walks": [], + }, + } + + active_contests = set() + + for daily_walk_data in json_data["daily_walks"]: + # Validate data + json_status = validate_request_json( + daily_walk_data, + required_fields=["date", "steps", "distance"], + ) + if "status" in json_status and json_status["status"] == "error": + return JsonResponse(json_status) + + walk_date = daily_walk_data["date"] + contest = Contest.active( + for_date=date.fromisoformat(walk_date), strict=True + ) + if contest is not None: + active_contests.add(contest) + + # Check if there is already an entry for this date. If there is, + # update the entry. + # NOTE: By definition, there should be one and only one entry for + # a given email and date. + # NOTE: This is a potential vulnerability. Since there is no email + # authentication at the moment, anyone can simply spoof an email + # id with a new device and overwrite daily walk data for the + # target email. 
This is also a result of no session auth + # (can easily hit the api directly) + try: + # Updation + daily_walk = DailyWalk.objects.get( + account=device.account, date=walk_date + ) + daily_walk.steps = daily_walk_data["steps"] + daily_walk.distance = daily_walk_data["distance"] + daily_walk.device_id = json_data["account_id"] + daily_walk.save() + except ObjectDoesNotExist: + # Creation if object is missing + daily_walk = DailyWalk.objects.create( + date=walk_date, + steps=daily_walk_data["steps"], + distance=daily_walk_data["distance"], + device=device, + ) + + # Update the json object + json_response["payload"]["daily_walks"].append( + { + "date": daily_walk.date, + "steps": daily_walk.steps, + "distance": daily_walk.distance, + } + ) + + # Register contest for account if the day falls between contest dates + contest = Contest.active(for_date=date.today(), strict=True) + if contest: + active_contests.add(contest) + try: + acct = device.account + acct.contests.add(contest) + except Exception: + logger.error( + "Could not associate contest " + f"{contest} with account {acct}!", + exc_info=True, + ) + else: + # No active contest + pass + + # Update Leaderboard + for contest in active_contests: + DailyWalk.update_leaderboard(device=device, contest=contest) + + return JsonResponse(json_response) + + def http_method_not_allowed(self, request): + return JsonResponse( + {"status": "error", "message": "Method not allowed!"} + ) + + +# Should pagination be added? +@method_decorator(csrf_exempt, name="dispatch") +class DailyWalkListView(View): + """View to retrieve Daily Walks""" + + model = DailyWalk + http_method_names = ["post"] + + def post(self, request, *args, **kwargs): + json_data = json.loads(request.body) + + # Validate json. If any field is missing, send back the response message + json_status = validate_request_json( + json_data, required_fields=["account_id"] + ) + if "status" in json_status and json_status["status"] == "error": + return JsonResponse(json_status) + + # Get the device if already registered + try: + device = Device.objects.get(device_id=json_data["account_id"]) + # appuser = AppUser.objects.get(account_id=json_data["account_id"]) + except ObjectDoesNotExist: + return JsonResponse( + { + "status": "error", + "message": ( + "Unregistered device - " + f"{json_data['account_id']}." + " Please register first!" + ), + } + ) + + # Get walks from tied to this account + # NOTE: This is very hacky and cannot distinguish between legit and + # fake users. + # Someone can simply install the app on a new device and use a known + # email id and have the metrics simply aggregated. 
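+        # (For example, two devices registered with the same email share one
+        # account, so their walks would all match this filter and be summed
+        # together below. Hypothetical scenario, included only to illustrate.)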
+ # For the simple use case, this is likely not an issue and would need + # to be handled manually if needed + daily_walks = DailyWalk.objects.filter(account=device.account) + + # Hacky serializer + total_steps = 0 + total_distance = 0 + daily_walk_list = [] + for daily_walk in daily_walks: + daily_walk_list.append( + { + "date": daily_walk.date, + "steps": daily_walk.steps, + "distance": daily_walk.distance, + } + ) + total_steps += daily_walk.steps + total_distance += daily_walk.distance + + # Sort the list based on the date + daily_walk_list = sorted( + daily_walk_list, key=lambda x: x["date"], reverse=True + ) + # Create the payload with meta information and send it back + payload = { + "daily_walks": daily_walk_list, + "total_steps": total_steps, + "total_distance": total_distance, + "status": "success", + } + return JsonResponse(payload) + + def http_method_not_allowed(self, request): + return JsonResponse( + {"status": "error", "message": "Method not allowed!"} + ) diff --git a/home/views/api/export.py b/home/views/api/export.py index 193c434d..446348f9 100644 --- a/home/views/api/export.py +++ b/home/views/api/export.py @@ -1,279 +1,279 @@ -import csv -import logging -import os -import tempfile - -from datetime import timedelta - -from django.db.models import ( - BooleanField, - Count, - ExpressionWrapper, - Q, - Sum, -) -from django.http import FileResponse, HttpResponse -from django.views import View - -from home.models import Account, Contest, DailyWalk - -logger = logging.getLogger(__name__) - -# configure the base CSV headers -CSV_COLUMNS = [ - {"name": "Participant Name", "id": "name"}, - {"name": "Date Enrolled", "id": "created"}, - {"name": "Email", "id": "email"}, - {"name": "Zip Code", "id": "zip"}, - {"name": "Sexual Orientation", "id": "sexual_orien"}, - {"name": "Sexual Orientation Other", "id": "sexual_orien_other"}, - {"name": "Gender Identity", "id": "gender"}, - {"name": "Gender Identity Other", "id": "gender_other"}, - {"name": "Race", "id": "race"}, - {"name": "Race Other", "id": "race_other"}, - {"name": "Is Latino", "id": "is_latino"}, - {"name": "Age", "id": "age"}, - {"name": "Is New Signup", "id": "is_new"}, - {"name": "Active During Contest", "id": "is_active"}, - {"name": "Total Daily Walks During Contest", "id": "dw_contest_count"}, - { - "name": "Total Daily Walks During Baseline", - "id": "dw_baseline_count", - }, - {"name": "Total Steps During Contest", "id": "dw_contest_steps"}, - {"name": "Total Steps During Baseline", "id": "dw_baseline_steps"}, - { - "name": "Total Recorded Walks During Contest", - "id": "iw_contest_count", - }, - { - "name": "Total Recorded Walks During Baseline", - "id": "iw_baseline_count", - }, - { - "name": "Total Recorded Steps During Contest", - "id": "iw_contest_steps", - }, - { - "name": "Total Recorded Steps During Baseline", - "id": "iw_baseline_steps", - }, - { - "name": "Total Recorded Walk Time During Contest", - "id": "iw_contest_time", - }, - { - "name": "Total Recorded Walk Time During Baseline", - "id": "iw_baseline_time", - }, -] - - -def get_dailywalk_stats(name, ids, dailywalk_filter): - filters = Q(id__in=ids) - values = ["id"] - annotate = { - f"dw_{name}_count": Count("dailywalk", filter=dailywalk_filter), - f"dw_{name}_steps": Sum("dailywalk__steps", filter=dailywalk_filter), - f"dw_{name}_distance": Sum( - "dailywalk__distance", filter=dailywalk_filter - ), - } - order_by = ["id"] - return ( - Account.objects.filter(filters) - .values(*values) - .annotate(**annotate) - .order_by(*order_by) - ) - - -def 
get_intentionalwalk_stats(name, ids, intentionalwalk_filter): - filters = Q(id__in=ids) - values = ["id"] - annotate = { - f"iw_{name}_count": Count( - "intentionalwalk", filter=intentionalwalk_filter - ), - f"iw_{name}_steps": Sum( - "intentionalwalk__steps", filter=intentionalwalk_filter - ), - f"iw_{name}_distance": Sum( - "intentionalwalk__distance", filter=intentionalwalk_filter - ), - f"iw_{name}_time": Sum( - "intentionalwalk__walk_time", filter=intentionalwalk_filter - ), - } - order_by = ["id"] - return ( - Account.objects.filter(filters) - .values(*values) - .annotate(**annotate) - .order_by(*order_by) - ) - - -def get_daily_walks(ids, contest): - filters = Q( - account_id__in=ids, date__range=(contest.start_baseline, contest.end) - ) - values = ["account_id", "date", "steps"] - order_by = ["account_id", "date"] - return ( - DailyWalk.objects.filter(filters).values(*values).order_by(*order_by) - ) - - -def export_contest_users_data(file, contest_id, is_tester): - # get the Contest object - contest = Contest.objects.get(pk=contest_id) - - # configure the CSV writer - fieldnames = [col["id"] for col in CSV_COLUMNS] - header = {col["id"]: col["name"] for col in CSV_COLUMNS} - # add headers for every day in the output range (start of baseline to end of contest) - for dt in range((contest.end - contest.start_baseline).days + 1): - date = contest.start_baseline + timedelta(days=dt) - fieldnames.append(str(date)) - header[str(date)] = str(date) - writer = csv.DictWriter(file, fieldnames=fieldnames, extrasaction="ignore") - writer.writerow(header) - - # query for the base attributes - filters = Q(contests__contest_id=contest_id, is_tester=is_tester) - values = [ - "id", - "created", - "name", - "email", - "age", - "zip", - "gender", - "gender_other", - "sexual_orien", - "sexual_orien_other", - "race", - "race_other", - "is_latino", - ] - annotate = { - "is_new": ExpressionWrapper( - Q( - created__gte=contest.start_promo, - created__lt=contest.end + timedelta(days=1), - ), - output_field=BooleanField(), - ), - } - order_by = ["id"] - results = ( - Account.objects.filter(filters) - .values(*values) - .annotate(**annotate) - .order_by(*order_by) - ) - - # set up to process in batches - offset = 0 - limit = 25 - total = results.count() - while offset < total: - ids = [] - rows = [] - for row in results[offset : offset + limit]: # noqa E203 - # convert race Set into a comma delimited string - row["race"] = ",".join(row["race"]) - # gather all rows and ids - rows.append(row) - ids.append(row["id"]) - offset = offset + limit - - # add in the baseline period dailywalk stats - dw_baseline_results = get_dailywalk_stats( - "baseline", - ids, - Q( - dailywalk__date__range=( - contest.start_baseline, - contest.start - timedelta(days=1), - ) - ), - ) - for i, row in enumerate(dw_baseline_results): - rows[i].update(row) - - # add in the contest period dailywalk stats - dw_contest_results = get_dailywalk_stats( - "contest", - ids, - Q(dailywalk__date__range=(contest.start, contest.end)), - ) - for i, row in enumerate(dw_contest_results): - rows[i].update(row) - - # add in the baseline period intentionalwalk stats - iw_baseline_results = get_intentionalwalk_stats( - "baseline", - ids, - Q( - intentionalwalk__start__gte=contest.start_baseline, - intentionalwalk__start__lt=contest.start, - ), - ) - for i, row in enumerate(iw_baseline_results): - rows[i].update(row) - - # add in the contest period intentionalwalk stats - iw_contest_results = get_intentionalwalk_stats( - "contest", - ids, - Q( - 
intentionalwalk__start__gte=contest.start, - intentionalwalk__start__lt=contest.end + timedelta(days=1), - ), - ) - for i, row in enumerate(iw_contest_results): - rows[i].update(row) - # at this point, we have enough info to determine if user is "active" - rows[i]["is_active"] = ( - rows[i]["dw_contest_count"] > 0 - or rows[i]["iw_contest_count"] > 0 - ) - - # now add in every day of step data for each user - daily_walks = get_daily_walks(ids, contest) - rows_iter = iter(rows) - account = next(rows_iter) - for row in daily_walks: - while row["account_id"] != account["id"]: - account = next(rows_iter) - account[str(row["date"])] = row["steps"] - - # finally, write it out to the CSV...! - writer.writerows(rows) - - -class ExportUsersView(View): - http_method_names = ["get"] - - def get(self, request, *args, **kwargs): - contest_id = request.GET.get("contest_id", None) - is_tester = request.GET.get("is_tester", None) == "true" - - if not contest_id: - return HttpResponse(status=422) - elif not request.user.is_authenticated: - return HttpResponse(status=401) - - try: - tmp_file = tempfile.NamedTemporaryFile(delete=False) - with open(tmp_file.name, "w") as file: - export_contest_users_data(file, contest_id, is_tester) - return FileResponse( - open(tmp_file.name, "rb"), - as_attachment=True, - filename="users_agg.csv", - ) - finally: - os.remove(tmp_file.name) +import csv +import logging +import os +import tempfile + +from datetime import timedelta + +from django.db.models import ( + BooleanField, + Count, + ExpressionWrapper, + Q, + Sum, +) +from django.http import FileResponse, HttpResponse +from django.views import View + +from home.models import Account, Contest, DailyWalk + +logger = logging.getLogger(__name__) + +# configure the base CSV headers +CSV_COLUMNS = [ + {"name": "Participant Name", "id": "name"}, + {"name": "Date Enrolled", "id": "created"}, + {"name": "Email", "id": "email"}, + {"name": "Zip Code", "id": "zip"}, + {"name": "Sexual Orientation", "id": "sexual_orien"}, + {"name": "Sexual Orientation Other", "id": "sexual_orien_other"}, + {"name": "Gender Identity", "id": "gender"}, + {"name": "Gender Identity Other", "id": "gender_other"}, + {"name": "Race", "id": "race"}, + {"name": "Race Other", "id": "race_other"}, + {"name": "Is Latino", "id": "is_latino"}, + {"name": "Age", "id": "age"}, + {"name": "Is New Signup", "id": "is_new"}, + {"name": "Active During Contest", "id": "is_active"}, + {"name": "Total Daily Walks During Contest", "id": "dw_contest_count"}, + { + "name": "Total Daily Walks During Baseline", + "id": "dw_baseline_count", + }, + {"name": "Total Steps During Contest", "id": "dw_contest_steps"}, + {"name": "Total Steps During Baseline", "id": "dw_baseline_steps"}, + { + "name": "Total Recorded Walks During Contest", + "id": "iw_contest_count", + }, + { + "name": "Total Recorded Walks During Baseline", + "id": "iw_baseline_count", + }, + { + "name": "Total Recorded Steps During Contest", + "id": "iw_contest_steps", + }, + { + "name": "Total Recorded Steps During Baseline", + "id": "iw_baseline_steps", + }, + { + "name": "Total Recorded Walk Time During Contest", + "id": "iw_contest_time", + }, + { + "name": "Total Recorded Walk Time During Baseline", + "id": "iw_baseline_time", + }, +] + + +def get_dailywalk_stats(name, ids, dailywalk_filter): + filters = Q(id__in=ids) + values = ["id"] + annotate = { + f"dw_{name}_count": Count("dailywalk", filter=dailywalk_filter), + f"dw_{name}_steps": Sum("dailywalk__steps", filter=dailywalk_filter), + f"dw_{name}_distance": 
Sum( + "dailywalk__distance", filter=dailywalk_filter + ), + } + order_by = ["id"] + return ( + Account.objects.filter(filters) + .values(*values) + .annotate(**annotate) + .order_by(*order_by) + ) + + +def get_intentionalwalk_stats(name, ids, intentionalwalk_filter): + filters = Q(id__in=ids) + values = ["id"] + annotate = { + f"iw_{name}_count": Count( + "intentionalwalk", filter=intentionalwalk_filter + ), + f"iw_{name}_steps": Sum( + "intentionalwalk__steps", filter=intentionalwalk_filter + ), + f"iw_{name}_distance": Sum( + "intentionalwalk__distance", filter=intentionalwalk_filter + ), + f"iw_{name}_time": Sum( + "intentionalwalk__walk_time", filter=intentionalwalk_filter + ), + } + order_by = ["id"] + return ( + Account.objects.filter(filters) + .values(*values) + .annotate(**annotate) + .order_by(*order_by) + ) + + +def get_daily_walks(ids, contest): + filters = Q( + account_id__in=ids, date__range=(contest.start_baseline, contest.end) + ) + values = ["account_id", "date", "steps"] + order_by = ["account_id", "date"] + return ( + DailyWalk.objects.filter(filters).values(*values).order_by(*order_by) + ) + + +def export_contest_users_data(file, contest_id, is_tester): + # get the Contest object + contest = Contest.objects.get(pk=contest_id) + + # configure the CSV writer + fieldnames = [col["id"] for col in CSV_COLUMNS] + header = {col["id"]: col["name"] for col in CSV_COLUMNS} + # add headers for every day in the output range (start of baseline to end of contest) + for dt in range((contest.end - contest.start_baseline).days + 1): + date = contest.start_baseline + timedelta(days=dt) + fieldnames.append(str(date)) + header[str(date)] = str(date) + writer = csv.DictWriter(file, fieldnames=fieldnames, extrasaction="ignore") + writer.writerow(header) + + # query for the base attributes + filters = Q(contests__contest_id=contest_id, is_tester=is_tester) + values = [ + "id", + "created", + "name", + "email", + "age", + "zip", + "gender", + "gender_other", + "sexual_orien", + "sexual_orien_other", + "race", + "race_other", + "is_latino", + ] + annotate = { + "is_new": ExpressionWrapper( + Q( + created__gte=contest.start_promo, + created__lt=contest.end + timedelta(days=1), + ), + output_field=BooleanField(), + ), + } + order_by = ["id"] + results = ( + Account.objects.filter(filters) + .values(*values) + .annotate(**annotate) + .order_by(*order_by) + ) + + # set up to process in batches + offset = 0 + limit = 25 + total = results.count() + while offset < total: + ids = [] + rows = [] + for row in results[offset : offset + limit]: # noqa E203 + # convert race Set into a comma delimited string + row["race"] = ",".join(row["race"]) + # gather all rows and ids + rows.append(row) + ids.append(row["id"]) + offset = offset + limit + + # add in the baseline period dailywalk stats + dw_baseline_results = get_dailywalk_stats( + "baseline", + ids, + Q( + dailywalk__date__range=( + contest.start_baseline, + contest.start - timedelta(days=1), + ) + ), + ) + for i, row in enumerate(dw_baseline_results): + rows[i].update(row) + + # add in the contest period dailywalk stats + dw_contest_results = get_dailywalk_stats( + "contest", + ids, + Q(dailywalk__date__range=(contest.start, contest.end)), + ) + for i, row in enumerate(dw_contest_results): + rows[i].update(row) + + # add in the baseline period intentionalwalk stats + iw_baseline_results = get_intentionalwalk_stats( + "baseline", + ids, + Q( + intentionalwalk__start__gte=contest.start_baseline, + intentionalwalk__start__lt=contest.start, + ), + ) + for 
i, row in enumerate(iw_baseline_results): + rows[i].update(row) + + # add in the contest period intentionalwalk stats + iw_contest_results = get_intentionalwalk_stats( + "contest", + ids, + Q( + intentionalwalk__start__gte=contest.start, + intentionalwalk__start__lt=contest.end + timedelta(days=1), + ), + ) + for i, row in enumerate(iw_contest_results): + rows[i].update(row) + # at this point, we have enough info to determine if user is "active" + rows[i]["is_active"] = ( + rows[i]["dw_contest_count"] > 0 + or rows[i]["iw_contest_count"] > 0 + ) + + # now add in every day of step data for each user + daily_walks = get_daily_walks(ids, contest) + rows_iter = iter(rows) + account = next(rows_iter) + for row in daily_walks: + while row["account_id"] != account["id"]: + account = next(rows_iter) + account[str(row["date"])] = row["steps"] + + # finally, write it out to the CSV...! + writer.writerows(rows) + + +class ExportUsersView(View): + http_method_names = ["get"] + + def get(self, request, *args, **kwargs): + contest_id = request.GET.get("contest_id", None) + is_tester = request.GET.get("is_tester", None) == "true" + + if not contest_id: + return HttpResponse(status=422) + elif not request.user.is_authenticated: + return HttpResponse(status=401) + + try: + tmp_file = tempfile.NamedTemporaryFile(delete=False) + with open(tmp_file.name, "w") as file: + export_contest_users_data(file, contest_id, is_tester) + return FileResponse( + open(tmp_file.name, "rb"), + as_attachment=True, + filename="users_agg.csv", + ) + finally: + os.remove(tmp_file.name) diff --git a/home/views/api/histogram/serializers.py b/home/views/api/histogram/serializers.py index a705f8fc..ec73e07e 100644 --- a/home/views/api/histogram/serializers.py +++ b/home/views/api/histogram/serializers.py @@ -1,508 +1,510 @@ -from datetime import date, datetime, time -from rest_framework import serializers -from django.db.models import ( - Max, - Min, - Q, - F, - Count, - ExpressionWrapper, - IntegerField, - QuerySet, - When, - Value, - Case, -) -from django.db.models.functions import Floor -from typing import Dict, Any, Optional, TypedDict, List -from home.models.contest import Contest -from home.models.dailywalk import DailyWalk -from home.models.intentionalwalk import IntentionalWalk -from home.models.leaderboard import Leaderboard -from home.models.account import Account -from django.db import models as djmodel -from django.utils.timezone import make_aware - - -class ValidatedHistogramReq(TypedDict): - query_set: QuerySet - # bin_size is provided if the bins were - # generated with equal bin sizes, - # otherwise the bin_custom is provided. - bin_size: Optional[int] - bin_custom: List[int] - bin_count: int - unit: str - - -class HistogramReqSerializer(serializers.Serializer): - """HistogramReqSerializer is a serializer for the histogram API. - - This serializer is used to validate and prepare the incoming request data - and prepare the annotations and filters for the histogram query. - This generates a histogram based on the bin_count or bin_size, - or custom bin intervals. - and the field of certain models to group by. 
- """ - - supported_fields = { - Leaderboard: ["steps"], - DailyWalk: ["steps", "distance"], - IntentionalWalk: ["steps", "distance"], - Account: ["age"], - } - - field_units = { - "steps": "steps", - "distance": "miles", - "age": "years", - } - - field = serializers.CharField( - required=True, help_text="The field to group by" - ) - - contest_id = serializers.CharField( - required=False, - help_text="The ID of the contest to filter by." - + "This field is mutually exclusive with the date fields." - + "For distance and step metrics, this will restrict the records" - + "to the values recorded during the contest period's start and end date." - + "For account metrics, this will restrict the records to the accounts that participated in the contest.", - ) - is_tester = serializers.BooleanField( - required=False, - help_text="If true, will only return records related to tester accounts.", - ) - - bin_size = serializers.IntegerField( - required=False, - help_text="The size of the bin to group the data by. Units will be the same as the field." - + "Note this is mutually exclusive with the bin_count and bin_custom field.", - ) - - bin_count = serializers.IntegerField( - required=False, - help_text="The number of bins to group the data by." - + "Note this is mutually exclusive with the bin_size and bin_custom field.", - ) - - bin_custom = serializers.CharField( - required=False, - help_text="A list of comma separated custom bin sizes in increasing order to group the data by." - + "Example: 0,18,29,44,59" - + "Note this is mutually exclusive with the bin_size and bin_count fields.", - ) - - # Date fields to filter by, inclusive. - # These fields are mutually exclusive with the contest_id field. - start_date = serializers.DateField( - required=False, help_text="The start date to filter the records by." - ) - end_date = serializers.DateField( - required=False, help_text="The end date to filter the records by." - ) - - def __init__(self, *args, model=None, **kwargs): - super().__init__(*args, **kwargs) - self.model: djmodel.Model = model - self.__unit: str = None - - def validate(self, data: Dict[str, Any]) -> ValidatedHistogramReq: - """Validates and prepares the incoming request data. - - Converts the request params into FilterSet params and annotations. - """ - model = self.model - if not model: - raise serializers.ValidationError( - {"non_field_errors": "Model is required."} - ) - - bin_size: Optional[int] = data.get("bin_size") - bin_count: Optional[int] = data.get("bin_count") - bin_custom_str: Optional[str] = data.get("bin_custom", "") - bin_custom = [] - try: - bin_custom: List[int] = [ - int(v) for v in bin_custom_str.split(",") if v - ] - except ValueError: - raise serializers.ValidationError( - { - "bin_custom": f"bin_custom could not be parsed: {bin_custom_str}" - } - ) - field: str = data.get("field") - contest_id: Optional[int] = data.get("contest_id") - is_tester: Optional[bool] = data.get("is_tester") - start_date: Optional[date] = data.get("start_date") - end_date: Optional[date] = data.get("end_date") - - if sum((bool(x) for x in (bin_size, bin_count, bin_custom))) > 1: - raise serializers.ValidationError( - { - "non_field_errors": "bin_size, bin_count and bin_custom are mutually exclusive." - } - ) - - if not bin_size and not bin_count and not bin_custom: - raise serializers.ValidationError( - { - "non_field_errors": "bin_size, bin_count, or bin_custom is required." 
- } - ) - - if bin_size and bin_size <= 0: - raise serializers.ValidationError( - {"bin_size": "bin_size must be greater than 0."} - ) - - if bin_count and bin_count < 2: - raise serializers.ValidationError( - {"bin_count": "bin_count must be greater than 1."} - ) - - if bin_custom: - increasing = all(a < b for a, b in zip(bin_custom, bin_custom[1:])) - if not increasing: - raise serializers.ValidationError( - { - "bin_custom": "bin_custom values must be in increasing order." - } - ) - if not all([x >= 0 for x in bin_custom]): - raise serializers.ValidationError( - {"bin_custom": "bin_custom values must be positive."} - ) - - valid_fields = self.supported_fields.get(model, []) - if field not in valid_fields: - raise serializers.ValidationError( - { - "non_field_errors": f"{field} is not supported for {model}. Please use one of {valid_fields}." - } - ) - - self.__unit = self.field_units.get(field) - - if contest_id and (start_date or end_date): - raise serializers.ValidationError( - { - "non_field_errors": "contest_id and start_date/end_date are mutually exclusive." - } - ) - - contest = None - if contest_id: - try: - contest = Contest.objects.get(contest_id=contest_id) - except Contest.DoesNotExist: - raise serializers.ValidationError( - { - "contest_id": f"Contest with id {contest_id} does not exist." - } - ) - - return self.create_bin_query( - model=model, - field=field, - is_tester=is_tester, - contest=contest, - start_date=start_date, - end_date=end_date, - bin_count=bin_count, - bin_size=bin_size, - bin_custom=bin_custom, - ) - - @property - def unit(self): - return self.__unit - - def create_bin_query( - self, - model: djmodel.Model, - field: str, - is_tester: bool, - contest: Contest, - start_date: str, - end_date: str, - bin_count: int, - bin_size: int, - bin_custom: List[int], - ) -> ValidatedHistogramReq: - """Handle bin_count generates a histogram based on the bin_count.""" - date_filter: Q = self.get_date_filter( - model=model, - start_date=start_date, - end_date=end_date, - contest=contest, - is_tester=is_tester, - ) - - range = model.objects.filter(date_filter).aggregate( - max_value=Max(field), - min_value=Min(field), - ) - - upper, lower = range.get("max_value"), range.get("min_value") - if not upper or not lower: - # no data was found in the range. - return { - "query_set": model.objects.none(), - "bin_count": bin_count, - "bin_custom": bin_custom, - "bin_size": bin_size, - "unit": self.unit, - } - - if bin_count: - # Using an offset because dividing by N equal parts - # will result in N+1 bins. - # Ex: Data ranging from 0 to 11 and divide it into two bins. - # 11 // 2 = 5, results in 3 bins: 0-5, 5-10, 10-15 to capture the 11. - offset = 1 - bin_size = (upper - 0) // (bin_count - offset) - - # Prevent division by zero when the - # range is less than the bin_count itself. - # We'll just drop everything into one bin. - bin_size = max(bin_size, 1) - elif bin_size: - # Using a lower bound is an optimization - # to reduce the search space for the query - # for performant queries on larger or sparse datasets. - bin_count = (upper - lower) // bin_size - - if bin_custom: - # For custom bins, we'll create a case statement. - cases = [ - When( - Q( - **{ - f"{field}__gte": bin_start, - f"{field}__lt": bin_end, - } - ), - then=Value(bin_start), - ) - for bin_start, bin_end in zip(bin_custom, bin_custom[1:]) - ] - - # SQL equivalent: - # SELECT - # CASE - # WHEN >= AND < THEN - # ... 
- # WHEN >= AND < THEN - # END AS bin_start, - # END AS bin_start, - # COUNT(*) AS count - # FROM - # JOIN ON .id = ._id # (if necessary) - # WHERE - # GROUP BY bin_start - # ORDER BY bin_start - last_bin = Value(bin_custom[-1]) - query_set = ( - model.objects.filter(date_filter) - .annotate( - bin_start=Case( - *cases, default=last_bin, output_field=IntegerField() - ), - ) - .values("bin_start") - .annotate(count=Count("*")) - .order_by("bin_start") - ) - - idx_lookup = { - bin_start: idx for idx, bin_start in enumerate(bin_custom) - } - - def get_bin_end(bin_start): - idx_of_end = idx_lookup[bin_start] + 1 - if idx_of_end >= len(bin_custom): - return upper - return bin_custom[idx_of_end] - - def get_bin_idx(bin_start): - return idx_lookup[bin_start] - - query_set = ( - { - "bin_start": bin["bin_start"], - "bin_end": get_bin_end(bin["bin_start"]), - "bin_idx": get_bin_idx(bin["bin_start"]), - "count": bin["count"], - } - for bin in query_set - ) - - return { - "query_set": query_set, - "bin_count": len(bin_custom), - "bin_custom": bin_custom, - "unit": self.unit, - } - - # For equal bins, we'll use the bin_size to generate the bins. - - # SQL equivalent: - # SELECT - # FLOOR( / ) AS bin_idx, - # FLOOR( / ) * AS bin_start, - # FLOOR( / ) * + AS bin_end, - # COUNT(*) AS count, - # FROM - # JOIN ON .id = ._id # (if necessary) - # WHERE - # GROUP BY bin - # ORDER BY bin - query_set = ( - model.objects.filter(date_filter) - .annotate( - bin_idx=ExpressionWrapper( - Floor(F(field) / bin_size), output_field=IntegerField() - ), - bin_start=ExpressionWrapper( - Floor(F(field) / bin_size) * bin_size, - output_field=IntegerField(), - ), - bin_end=ExpressionWrapper( - Floor(F(field) / bin_size) * bin_size + bin_size, - output_field=IntegerField(), - ), - ) - .values("bin_idx", "bin_start", "bin_end") - .annotate(count=Count("*")) - .order_by("bin_idx") - ) - - return { - "query_set": query_set, - "bin_size": bin_size, - "bin_count": bin_count, - "unit": self.unit, - } - - def get_date_filter( - self, - model: djmodel.Model, - contest: Optional[Contest], - start_date: Optional[date], - end_date: Optional[date], - is_tester: Optional[bool], - ) -> Q: - """Generates the correct filters for the given model. - - This is needed since the columns for dates are inconsistent across models, - and different depending on relationships. - """ - kwargs = {} - if model is Leaderboard: - if not contest: - raise serializers.ValidationError( - { - "contest_id": "contest_id is required for Leaderboard model." - } - ) - if start_date or end_date: - raise serializers.ValidationError( - { - "non_field_errors": "start_date and end_date is not supported for the Leaderboard model." - } - ) - - kwargs = { - # This looks wrong, but yes - the `contest` model *actually* - # does stutter; - # It is home_contest.contest_id. - "contest__contest_id": contest.contest_id if contest else None, - "account__is_tester": is_tester, - } - elif model is Account: - # NOTE: - # For accounts, the "contest_id" and - # "start_date" and "end_date" will refer - # not to the account's creation date, - # but their participation in any walk during that contest period - # OR the date range. - - if contest: - # Attach time and timezone to the dates, - # because in the database, the dates are stored as - # date objects with no time and timezone. 
- # We'll use the beginning of the start_date day, - # and the end of the end_date day to capture all records - start_date = make_aware( - datetime.combine( - contest.start_baseline or contest.start, time.min - ) - ) - end_date = make_aware(datetime.combine(contest.end, time.max)) - - # SQL equivalent: - # SELECT * FROM account WHERE id IN ( - # SELECT account_id FROM intentional_walk - # WHERE start_date BETWEEN AND - # AND end_date BETWEEN AND - # UNION - # SELECT account_id FROM daily_walk - # WHERE date BETWEEN contest.start_date AND contest.end_date - # AND end_date BETWEEN contest.start_date AND contest.end_date - # ) - iw_kwargs = { - k: v - for k, v in { - "start__lte": end_date, - "start__gte": start_date, - "end__lte": end_date, - "end__gte": start_date, - }.items() - if v is not None - } - dw_kwargs = { - k: v - for k, v in { - "date__lte": end_date, - "date__gte": start_date, - }.items() - if v is not None - } - - kwargs = {"is_tester": is_tester} - if iw_kwargs or dw_kwargs: - account_walkers = ( - IntentionalWalk.objects.filter(**iw_kwargs) - .values("account_id") - .distinct() - .union( - DailyWalk.objects.filter(**dw_kwargs) - .values("account_id") - .distinct() - ) - ) - kwargs["id__in"] = account_walkers - elif model is DailyWalk: - kwargs = { - "date__lte": end_date, - "date__gte": start_date, - "account__is_tester": is_tester, - } - elif model is IntentionalWalk: - kwargs = { - "start__lte": end_date, - "start__gte": start_date, - "end__lte": end_date, - "end__gte": start_date, - "account__is_tester": is_tester, - } - else: - raise serializers.ValidationError( - {"non_field_errors": f"{model} is not yet supported."} - ) - # Remove None values from the kwargs, - # as they indicate optional fields that were not provided. - return Q(**{k: v for k, v in kwargs.items() if v is not None}) +from datetime import date, datetime, time +from typing import Any, Dict, List, Optional, TypedDict + +from django.db import models as djmodel +from django.db.models import ( + Case, + Count, + ExpressionWrapper, + F, + IntegerField, + Max, + Min, + Q, + QuerySet, + Value, + When, +) +from django.db.models.functions import Floor +from django.utils.timezone import make_aware +from rest_framework import serializers + +from home.models.account import Account +from home.models.contest import Contest +from home.models.dailywalk import DailyWalk +from home.models.intentionalwalk import IntentionalWalk +from home.models.leaderboard import Leaderboard + + +class ValidatedHistogramReq(TypedDict): + query_set: QuerySet + # bin_size is provided if the bins were + # generated with equal bin sizes, + # otherwise the bin_custom is provided. + bin_size: Optional[int] + bin_custom: List[int] + bin_count: int + unit: str + + +class HistogramReqSerializer(serializers.Serializer): + """HistogramReqSerializer is a serializer for the histogram API. + + This serializer is used to validate and prepare the incoming request data + and prepare the annotations and filters for the histogram query. + This generates a histogram based on the bin_count or bin_size, + or custom bin intervals. + and the field of certain models to group by. 
+ """ + + supported_fields = { + Leaderboard: ["steps"], + DailyWalk: ["steps", "distance"], + IntentionalWalk: ["steps", "distance"], + Account: ["age"], + } + + field_units = { + "steps": "steps", + "distance": "miles", + "age": "years", + } + + field = serializers.CharField( + required=True, help_text="The field to group by" + ) + + contest_id = serializers.CharField( + required=False, + help_text="The ID of the contest to filter by." + + "This field is mutually exclusive with the date fields." + + "For distance and step metrics, this will restrict the records" + + "to the values recorded during the contest period's start and end date." + + "For account metrics, this will restrict the records to the accounts that participated in the contest.", + ) + is_tester = serializers.BooleanField( + required=False, + help_text="If true, will only return records related to tester accounts.", + ) + + bin_size = serializers.IntegerField( + required=False, + help_text="The size of the bin to group the data by. Units will be the same as the field." + + "Note this is mutually exclusive with the bin_count and bin_custom field.", + ) + + bin_count = serializers.IntegerField( + required=False, + help_text="The number of bins to group the data by." + + "Note this is mutually exclusive with the bin_size and bin_custom field.", + ) + + bin_custom = serializers.CharField( + required=False, + help_text="A list of comma separated custom bin sizes in increasing order to group the data by." + + "Example: 0,18,29,44,59" + + "Note this is mutually exclusive with the bin_size and bin_count fields.", + ) + + # Date fields to filter by, inclusive. + # These fields are mutually exclusive with the contest_id field. + start_date = serializers.DateField( + required=False, help_text="The start date to filter the records by." + ) + end_date = serializers.DateField( + required=False, help_text="The end date to filter the records by." + ) + + def __init__(self, *args, model=None, **kwargs): + super().__init__(*args, **kwargs) + self.model: djmodel.Model = model + self.__unit: str = None + + def validate(self, data: Dict[str, Any]) -> ValidatedHistogramReq: + """Validates and prepares the incoming request data. + + Converts the request params into FilterSet params and annotations. + """ + model = self.model + if not model: + raise serializers.ValidationError( + {"non_field_errors": "Model is required."} + ) + + bin_size: Optional[int] = data.get("bin_size") + bin_count: Optional[int] = data.get("bin_count") + bin_custom_str: Optional[str] = data.get("bin_custom", "") + bin_custom = [] + try: + bin_custom: List[int] = [ + int(v) for v in bin_custom_str.split(",") if v + ] + except ValueError: + raise serializers.ValidationError( + { + "bin_custom": f"bin_custom could not be parsed: {bin_custom_str}" + } + ) + field: str = data.get("field") + contest_id: Optional[int] = data.get("contest_id") + is_tester: Optional[bool] = data.get("is_tester") + start_date: Optional[date] = data.get("start_date") + end_date: Optional[date] = data.get("end_date") + + if sum((bool(x) for x in (bin_size, bin_count, bin_custom))) > 1: + raise serializers.ValidationError( + { + "non_field_errors": "bin_size, bin_count and bin_custom are mutually exclusive." + } + ) + + if not bin_size and not bin_count and not bin_custom: + raise serializers.ValidationError( + { + "non_field_errors": "bin_size, bin_count, or bin_custom is required." 
+ } + ) + + if bin_size and bin_size <= 0: + raise serializers.ValidationError( + {"bin_size": "bin_size must be greater than 0."} + ) + + if bin_count and bin_count < 2: + raise serializers.ValidationError( + {"bin_count": "bin_count must be greater than 1."} + ) + + if bin_custom: + increasing = all(a < b for a, b in zip(bin_custom, bin_custom[1:])) + if not increasing: + raise serializers.ValidationError( + { + "bin_custom": "bin_custom values must be in increasing order." + } + ) + if not all([x >= 0 for x in bin_custom]): + raise serializers.ValidationError( + {"bin_custom": "bin_custom values must be positive."} + ) + + valid_fields = self.supported_fields.get(model, []) + if field not in valid_fields: + raise serializers.ValidationError( + { + "non_field_errors": f"{field} is not supported for {model}. Please use one of {valid_fields}." + } + ) + + self.__unit = self.field_units.get(field) + + if contest_id and (start_date or end_date): + raise serializers.ValidationError( + { + "non_field_errors": "contest_id and start_date/end_date are mutually exclusive." + } + ) + + contest = None + if contest_id: + try: + contest = Contest.objects.get(contest_id=contest_id) + except Contest.DoesNotExist: + raise serializers.ValidationError( + { + "contest_id": f"Contest with id {contest_id} does not exist." + } + ) + + return self.create_bin_query( + model=model, + field=field, + is_tester=is_tester, + contest=contest, + start_date=start_date, + end_date=end_date, + bin_count=bin_count, + bin_size=bin_size, + bin_custom=bin_custom, + ) + + @property + def unit(self): + return self.__unit + + def create_bin_query( + self, + model: djmodel.Model, + field: str, + is_tester: bool, + contest: Contest, + start_date: str, + end_date: str, + bin_count: int, + bin_size: int, + bin_custom: List[int], + ) -> ValidatedHistogramReq: + """Handle bin_count generates a histogram based on the bin_count.""" + date_filter: Q = self.get_date_filter( + model=model, + start_date=start_date, + end_date=end_date, + contest=contest, + is_tester=is_tester, + ) + + range = model.objects.filter(date_filter).aggregate( + max_value=Max(field), + min_value=Min(field), + ) + + upper, lower = range.get("max_value"), range.get("min_value") + if not upper or not lower: + # no data was found in the range. + return { + "query_set": model.objects.none(), + "bin_count": bin_count, + "bin_custom": bin_custom, + "bin_size": bin_size, + "unit": self.unit, + } + + if bin_count: + # Using an offset because dividing by N equal parts + # will result in N+1 bins. + # Ex: Data ranging from 0 to 11 and divide it into two bins. + # 11 // 2 = 5, results in 3 bins: 0-5, 5-10, 10-15 to capture the 11. + offset = 1 + bin_size = (upper - 0) // (bin_count - offset) + + # Prevent division by zero when the + # range is less than the bin_count itself. + # We'll just drop everything into one bin. + bin_size = max(bin_size, 1) + elif bin_size: + # Using a lower bound is an optimization + # to reduce the search space for the query + # for performant queries on larger or sparse datasets. + bin_count = (upper - lower) // bin_size + + if bin_custom: + # For custom bins, we'll create a case statement. + cases = [ + When( + Q( + **{ + f"{field}__gte": bin_start, + f"{field}__lt": bin_end, + } + ), + then=Value(bin_start), + ) + for bin_start, bin_end in zip(bin_custom, bin_custom[1:]) + ] + + # SQL equivalent: + # SELECT + # CASE + # WHEN >= AND < THEN + # ... 
+ # WHEN >= AND < THEN + # END AS bin_start, + # END AS bin_start, + # COUNT(*) AS count + # FROM + # JOIN ON .id = ._id # (if necessary) + # WHERE + # GROUP BY bin_start + # ORDER BY bin_start + last_bin = Value(bin_custom[-1]) + query_set = ( + model.objects.filter(date_filter) + .annotate( + bin_start=Case( + *cases, default=last_bin, output_field=IntegerField() + ), + ) + .values("bin_start") + .annotate(count=Count("*")) + .order_by("bin_start") + ) + + idx_lookup = { + bin_start: idx for idx, bin_start in enumerate(bin_custom) + } + + def get_bin_end(bin_start): + idx_of_end = idx_lookup[bin_start] + 1 + if idx_of_end >= len(bin_custom): + return upper + return bin_custom[idx_of_end] + + def get_bin_idx(bin_start): + return idx_lookup[bin_start] + + query_set = ( + { + "bin_start": bin["bin_start"], + "bin_end": get_bin_end(bin["bin_start"]), + "bin_idx": get_bin_idx(bin["bin_start"]), + "count": bin["count"], + } + for bin in query_set + ) + + return { + "query_set": query_set, + "bin_count": len(bin_custom), + "bin_custom": bin_custom, + "unit": self.unit, + } + + # For equal bins, we'll use the bin_size to generate the bins. + + # SQL equivalent: + # SELECT + # FLOOR( / ) AS bin_idx, + # FLOOR( / ) * AS bin_start, + # FLOOR( / ) * + AS bin_end, + # COUNT(*) AS count, + # FROM + # JOIN ON .id = ._id # (if necessary) + # WHERE + # GROUP BY bin + # ORDER BY bin + query_set = ( + model.objects.filter(date_filter) + .annotate( + bin_idx=ExpressionWrapper( + Floor(F(field) / bin_size), output_field=IntegerField() + ), + bin_start=ExpressionWrapper( + Floor(F(field) / bin_size) * bin_size, + output_field=IntegerField(), + ), + bin_end=ExpressionWrapper( + Floor(F(field) / bin_size) * bin_size + bin_size, + output_field=IntegerField(), + ), + ) + .values("bin_idx", "bin_start", "bin_end") + .annotate(count=Count("*")) + .order_by("bin_idx") + ) + + return { + "query_set": query_set, + "bin_size": bin_size, + "bin_count": bin_count, + "unit": self.unit, + } + + def get_date_filter( + self, + model: djmodel.Model, + contest: Optional[Contest], + start_date: Optional[date], + end_date: Optional[date], + is_tester: Optional[bool], + ) -> Q: + """Generates the correct filters for the given model. + + This is needed since the columns for dates are inconsistent across models, + and different depending on relationships. + """ + kwargs = {} + if model is Leaderboard: + if not contest: + raise serializers.ValidationError( + { + "contest_id": "contest_id is required for Leaderboard model." + } + ) + if start_date or end_date: + raise serializers.ValidationError( + { + "non_field_errors": "start_date and end_date is not supported for the Leaderboard model." + } + ) + + kwargs = { + # This looks wrong, but yes - the `contest` model *actually* + # does stutter; + # It is home_contest.contest_id. + "contest__contest_id": contest.contest_id if contest else None, + "account__is_tester": is_tester, + } + elif model is Account: + # NOTE: + # For accounts, the "contest_id" and + # "start_date" and "end_date" will refer + # not to the account's creation date, + # but their participation in any walk during that contest period + # OR the date range. + + if contest: + # Attach time and timezone to the dates, + # because in the database, the dates are stored as + # date objects with no time and timezone. 
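Stripped of the request plumbing, the bin_custom branch above is a single Case/When bucketing aggregation; the sketch below shows the same shape against Account.age with illustrative bracket edges (not values taken from the app).

from django.db.models import Case, Count, IntegerField, Q, Value, When

edges = [0, 18, 30, 45, 60]  # illustrative brackets
cases = [
    When(Q(age__gte=lo, age__lt=hi), then=Value(lo))
    for lo, hi in zip(edges, edges[1:])
]
age_histogram = (
    Account.objects.annotate(
        bin_start=Case(*cases, default=Value(edges[-1]), output_field=IntegerField())
    )
    .values("bin_start")
    .annotate(count=Count("*"))
    .order_by("bin_start")
)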
+ # We'll use the beginning of the start_date day, + # and the end of the end_date day to capture all records + start_date = make_aware( + datetime.combine( + contest.start_baseline or contest.start, time.min + ) + ) + end_date = make_aware(datetime.combine(contest.end, time.max)) + + # SQL equivalent: + # SELECT * FROM account WHERE id IN ( + # SELECT account_id FROM intentional_walk + # WHERE start_date BETWEEN AND + # AND end_date BETWEEN AND + # UNION + # SELECT account_id FROM daily_walk + # WHERE date BETWEEN contest.start_date AND contest.end_date + # AND end_date BETWEEN contest.start_date AND contest.end_date + # ) + iw_kwargs = { + k: v + for k, v in { + "start__lte": end_date, + "start__gte": start_date, + "end__lte": end_date, + "end__gte": start_date, + }.items() + if v is not None + } + dw_kwargs = { + k: v + for k, v in { + "date__lte": end_date, + "date__gte": start_date, + }.items() + if v is not None + } + + kwargs = {"is_tester": is_tester} + if iw_kwargs or dw_kwargs: + account_walkers = ( + IntentionalWalk.objects.filter(**iw_kwargs) + .values("account_id") + .distinct() + .union( + DailyWalk.objects.filter(**dw_kwargs) + .values("account_id") + .distinct() + ) + ) + kwargs["id__in"] = account_walkers + elif model is DailyWalk: + kwargs = { + "date__lte": end_date, + "date__gte": start_date, + "account__is_tester": is_tester, + } + elif model is IntentionalWalk: + kwargs = { + "start__lte": end_date, + "start__gte": start_date, + "end__lte": end_date, + "end__gte": start_date, + "account__is_tester": is_tester, + } + else: + raise serializers.ValidationError( + {"non_field_errors": f"{model} is not yet supported."} + ) + # Remove None values from the kwargs, + # as they indicate optional fields that were not provided. + return Q(**{k: v for k, v in kwargs.items() if v is not None}) diff --git a/home/views/api/intentionalwalk.py b/home/views/api/intentionalwalk.py index 1623c427..a0a356aa 100644 --- a/home/views/api/intentionalwalk.py +++ b/home/views/api/intentionalwalk.py @@ -1,182 +1,182 @@ -import json - -from django.core.exceptions import ObjectDoesNotExist -from django.http import JsonResponse -from django.utils.decorators import method_decorator -from django.views import View -from django.views.decorators.csrf import csrf_exempt - -from home.models import Device, IntentionalWalk - -from .utils import validate_request_json - - -@method_decorator(csrf_exempt, name="dispatch") -class IntentionalWalkView(View): - """View to create Intentional Walks""" - - model = IntentionalWalk - http_method_names = ["post"] - - def post(self, request, *args, **kwargs): - json_data = json.loads(request.body) - - # Validate json. If any field is missing, send back the response message - json_status = validate_request_json( - json_data, - required_fields=["account_id", "intentional_walks"], - ) - if "status" in json_status and json_status["status"] == "error": - return JsonResponse(json_status) - - # Get the device if already registered - try: - device = Device.objects.get(device_id=json_data["account_id"]) - except ObjectDoesNotExist: - return JsonResponse( - { - "status": "error", - "message": ( - "Unregistered device - " - f"{json_data['account_id']}." - " Please register first!" 
- ), - } - ) - - json_response = { - "status": "success", - "message": "Intentional Walks recorded successfully", - "payload": { - "account_id": device.device_id, - "intentional_walks": [], - }, - } - - # Loop through all the intentional walks - for intentional_walk_data in json_data["intentional_walks"]: - - json_status = validate_request_json( - intentional_walk_data, - required_fields=[ - "event_id", - "start", - "end", - "steps", - "pause_time", - "distance", - ], - ) - if "status" in json_status and json_status["status"] == "error": - return JsonResponse(json_status) - - try: - intentional_walk = IntentionalWalk.objects.create( - event_id=intentional_walk_data["event_id"], - start=intentional_walk_data["start"], - end=intentional_walk_data["end"], - steps=intentional_walk_data["steps"], - distance=intentional_walk_data["distance"], - pause_time=intentional_walk_data["pause_time"], - device=device, - ) - json_response["payload"]["intentional_walks"].append( - { - "event_id": intentional_walk.event_id, - "start": intentional_walk.start, - "end": intentional_walk.end, - "steps": intentional_walk.steps, - "distance": intentional_walk.distance, - "pause_time": intentional_walk.pause_time, - } - ) - except Exception: - # IntentionalWalk records are immutable- so ignore any errors - # that might occur if the record already exists, etc... - pass - - return JsonResponse(json_response) - - def http_method_not_allowed(self, request): - return JsonResponse( - {"status": "error", "message": "Method not allowed!"} - ) - - -@method_decorator(csrf_exempt, name="dispatch") -class IntentionalWalkListView(View): - """View to retrieve Intentional Walks""" - - model = IntentionalWalk - http_method_names = ["post"] - - def post(self, request, *args, **kwargs): - json_data = json.loads(request.body) - - # Validate json. If any field is missing, send back the response message - json_status = validate_request_json( - json_data, required_fields=["account_id"] - ) - if "status" in json_status and json_status["status"] == "error": - return JsonResponse(json_status) - - # Get the device if already registered - try: - device = Device.objects.get(device_id=json_data["account_id"]) - except ObjectDoesNotExist: - return JsonResponse( - { - "status": "error", - "message": ( - "Unregistered device - " - f"{json_data['account_id']}." - " Please register first!" 
- ), - } - ) - - # Get walks from all the accounts tied to the email - intentional_walks = IntentionalWalk.objects.filter( - account=device.account - ) - - # Hacky serializer - total_steps = 0 - total_walk_time = 0 - total_pause_time = 0 - total_distance = 0 - intentional_walk_list = [] - for intentional_walk in intentional_walks: - intentional_walk_list.append( - { - "event_id": intentional_walk.event_id, - "start": intentional_walk.start, - "end": intentional_walk.end, - "steps": intentional_walk.steps, - "distance": intentional_walk.distance, - "walk_time": intentional_walk.walk_time, - "pause_time": intentional_walk.pause_time, - } - ) - total_steps += intentional_walk.steps - total_walk_time += intentional_walk.walk_time - total_distance += intentional_walk.distance - total_pause_time += intentional_walk.pause_time - intentional_walk_list = sorted( - intentional_walk_list, key=lambda x: x["start"], reverse=True - ) - payload = { - "intentional_walks": intentional_walk_list, - "total_steps": total_steps, - "total_walk_time": total_walk_time, - "total_pause_time": total_pause_time, - "total_distance": total_distance, - "status": "success", - } - - return JsonResponse(payload) - - def http_method_not_allowed(self, request): - return JsonResponse( - {"status": "error", "message": "Method not allowed!"} - ) +import json + +from django.core.exceptions import ObjectDoesNotExist +from django.http import JsonResponse +from django.utils.decorators import method_decorator +from django.views import View +from django.views.decorators.csrf import csrf_exempt + +from home.models import Device, IntentionalWalk + +from .utils import validate_request_json + + +@method_decorator(csrf_exempt, name="dispatch") +class IntentionalWalkView(View): + """View to create Intentional Walks""" + + model = IntentionalWalk + http_method_names = ["post"] + + def post(self, request, *args, **kwargs): + json_data = json.loads(request.body) + + # Validate json. If any field is missing, send back the response message + json_status = validate_request_json( + json_data, + required_fields=["account_id", "intentional_walks"], + ) + if "status" in json_status and json_status["status"] == "error": + return JsonResponse(json_status) + + # Get the device if already registered + try: + device = Device.objects.get(device_id=json_data["account_id"]) + except ObjectDoesNotExist: + return JsonResponse( + { + "status": "error", + "message": ( + "Unregistered device - " + f"{json_data['account_id']}." + " Please register first!" 
+ ), + } + ) + + json_response = { + "status": "success", + "message": "Intentional Walks recorded successfully", + "payload": { + "account_id": device.device_id, + "intentional_walks": [], + }, + } + + # Loop through all the intentional walks + for intentional_walk_data in json_data["intentional_walks"]: + + json_status = validate_request_json( + intentional_walk_data, + required_fields=[ + "event_id", + "start", + "end", + "steps", + "pause_time", + "distance", + ], + ) + if "status" in json_status and json_status["status"] == "error": + return JsonResponse(json_status) + + try: + intentional_walk = IntentionalWalk.objects.create( + event_id=intentional_walk_data["event_id"], + start=intentional_walk_data["start"], + end=intentional_walk_data["end"], + steps=intentional_walk_data["steps"], + distance=intentional_walk_data["distance"], + pause_time=intentional_walk_data["pause_time"], + device=device, + ) + json_response["payload"]["intentional_walks"].append( + { + "event_id": intentional_walk.event_id, + "start": intentional_walk.start, + "end": intentional_walk.end, + "steps": intentional_walk.steps, + "distance": intentional_walk.distance, + "pause_time": intentional_walk.pause_time, + } + ) + except Exception: + # IntentionalWalk records are immutable- so ignore any errors + # that might occur if the record already exists, etc... + pass + + return JsonResponse(json_response) + + def http_method_not_allowed(self, request): + return JsonResponse( + {"status": "error", "message": "Method not allowed!"} + ) + + +@method_decorator(csrf_exempt, name="dispatch") +class IntentionalWalkListView(View): + """View to retrieve Intentional Walks""" + + model = IntentionalWalk + http_method_names = ["post"] + + def post(self, request, *args, **kwargs): + json_data = json.loads(request.body) + + # Validate json. If any field is missing, send back the response message + json_status = validate_request_json( + json_data, required_fields=["account_id"] + ) + if "status" in json_status and json_status["status"] == "error": + return JsonResponse(json_status) + + # Get the device if already registered + try: + device = Device.objects.get(device_id=json_data["account_id"]) + except ObjectDoesNotExist: + return JsonResponse( + { + "status": "error", + "message": ( + "Unregistered device - " + f"{json_data['account_id']}." + " Please register first!" 
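An illustrative request body for the create view above follows (all values are made up). account_id carries the registering device's device_id, and each walk entry must supply the six required fields checked in the loop.

payload = {
    "account_id": "device-1234",          # made-up device_id
    "intentional_walks": [
        {
            "event_id": "walk-0001",
            "start": "2024-03-02T17:29:00-08:00",
            "end": "2024-03-02T17:54:00-08:00",
            "steps": 2400,
            "distance": 1.2,
            "pause_time": 90,
        }
    ],
}
# POSTed as JSON; an unknown device_id produces the "Unregistered device" error above.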
+ ), + } + ) + + # Get walks from all the accounts tied to the email + intentional_walks = IntentionalWalk.objects.filter( + account=device.account + ) + + # Hacky serializer + total_steps = 0 + total_walk_time = 0 + total_pause_time = 0 + total_distance = 0 + intentional_walk_list = [] + for intentional_walk in intentional_walks: + intentional_walk_list.append( + { + "event_id": intentional_walk.event_id, + "start": intentional_walk.start, + "end": intentional_walk.end, + "steps": intentional_walk.steps, + "distance": intentional_walk.distance, + "walk_time": intentional_walk.walk_time, + "pause_time": intentional_walk.pause_time, + } + ) + total_steps += intentional_walk.steps + total_walk_time += intentional_walk.walk_time + total_distance += intentional_walk.distance + total_pause_time += intentional_walk.pause_time + intentional_walk_list = sorted( + intentional_walk_list, key=lambda x: x["start"], reverse=True + ) + payload = { + "intentional_walks": intentional_walk_list, + "total_steps": total_steps, + "total_walk_time": total_walk_time, + "total_pause_time": total_pause_time, + "total_distance": total_distance, + "status": "success", + } + + return JsonResponse(payload) + + def http_method_not_allowed(self, request): + return JsonResponse( + {"status": "error", "message": "Method not allowed!"} + ) diff --git a/home/views/api/leaderboard.py b/home/views/api/leaderboard.py index d1f8c34f..d2e4e4ab 100644 --- a/home/views/api/leaderboard.py +++ b/home/views/api/leaderboard.py @@ -1,116 +1,111 @@ -from django.http import HttpResponse, JsonResponse -from django.utils.decorators import method_decorator -from django.views import View -from django.views.decorators.csrf import csrf_exempt -from django.db.models.functions import Rank -from django.db.models.expressions import Window -from django.db.models import F -from django.core.exceptions import ObjectDoesNotExist - - -from home.models import ( - Contest, - Device, - Leaderboard, -) - - -@method_decorator(csrf_exempt, name="dispatch") -# Dispatch? -class LeaderboardListView(View): - """View to retrieve leaderboard""" - - # model = Leaderboard - http_method_names = ["get"] - - def get(self, request, *args, **kwargs): - - contest_id = request.GET.get("contest_id") - device_id = request.GET.get("device_id") - - # Parse params - if contest_id is None: - return HttpResponse("No contest specified") - - current_contest = Contest.objects.filter(contest_id=contest_id) - if current_contest is None: - return JsonResponse( - { - "status": "error", - "message": "Contest not found", - } - ) - - # http://localhost:8000/api/leaderboard/ - # get?contest_id=?device_id= - - # Validate request. If any field is missing, - # send back the response message - # Get the device if already registered - device = None - try: - device = Device.objects.get(device_id=device_id) - except ObjectDoesNotExist: - return JsonResponse( - { - "status": "error", - "message": ( - "Unregistered device - " - "Please register first!" 
- "device_id:" - f"{device_id}" - ), - } - ) - - # Json response template - json_response = { - "status": "success", - "message": "Leaderboard accessed successfully", - "payload": { - "leaderboard": [], - }, - } - - leaderboard_list = [] - leaderboard_length = 10 - leaderboard = ( - Leaderboard.objects.filter( - contest_id=contest_id, - account__is_tester=device.account.is_tester, - ) - .values("account_id", "steps") - .annotate( - rank=Window(expression=Rank(), order_by=F("steps").desc()) - ) - ) - - # get top 10 - leaderboard_list = list(leaderboard[0:leaderboard_length]) - - # Check if user should be added after top 10 displayed - eleventh_place = True - for user in leaderboard_list: - if user["account_id"] == device.account.id: - user["device_id"] = device_id - eleventh_place = False - break - - # If user not in top 10, add as 11th in list - if eleventh_place: - leaderboard = Leaderboard.objects.filter( - contest_id=contest_id, account=device.account - ).values("account_id", "steps") - if len(leaderboard) > 0: - user = leaderboard[0] - user["device_id"] = device_id - user["rank"] = Leaderboard.objects.filter( - contest_id=contest_id, - steps__gte=user["steps"], - account__is_tester=device.account.is_tester, - ).count() - leaderboard_list.append(user) - - json_response["payload"]["leaderboard"] = leaderboard_list - - return JsonResponse(json_response) +from django.core.exceptions import ObjectDoesNotExist +from django.db.models import F +from django.db.models.expressions import Window +from django.db.models.functions import Rank +from django.http import HttpResponse, JsonResponse +from django.utils.decorators import method_decorator +from django.views import View +from django.views.decorators.csrf import csrf_exempt + +from home.models import Contest, Device, Leaderboard + + +@method_decorator(csrf_exempt, name="dispatch") +# Dispatch? +class LeaderboardListView(View): + """View to retrieve leaderboard""" + + # model = Leaderboard + http_method_names = ["get"] + + def get(self, request, *args, **kwargs): + + contest_id = request.GET.get("contest_id") + device_id = request.GET.get("device_id") + + # Parse params + if contest_id is None: + return HttpResponse("No contest specified") + + current_contest = Contest.objects.filter(contest_id=contest_id) + if not current_contest: + return JsonResponse( + { + "status": "error", + "message": "Contest not found", + } + ) + + # http://localhost:8000/api/leaderboard/ + # get?contest_id=?device_id= + + # Validate request. If any field is missing, + # send back the response message + # Get the device if already registered + device = None + try: + device = Device.objects.get(device_id=device_id) + except ObjectDoesNotExist: + return JsonResponse( + { + "status": "error", + "message": ( + "Unregistered device - " + "Please register first!" 
+ "device_id:" + f"{device_id}" + ), + } + ) + + # Json response template + json_response = { + "status": "success", + "message": "Leaderboard accessed successfully", + "payload": { + "leaderboard": [], + }, + } + + leaderboard_list = [] + leaderboard_length = 10 + leaderboard = ( + Leaderboard.objects.filter( + contest_id=contest_id, + account__is_tester=device.account.is_tester, + ) + .values("account_id", "steps") + .annotate( + rank=Window(expression=Rank(), order_by=F("steps").desc()) + ) + ) + + # get top 10 + leaderboard_list = list(leaderboard[0:leaderboard_length]) + + # Check if user should be added after top 10 displayed + eleventh_place = True + for user in leaderboard_list: + if user["account_id"] == device.account.id: + user["device_id"] = device_id + eleventh_place = False + break + + # If user not in top 10, add as 11th in list + if eleventh_place: + leaderboard = Leaderboard.objects.filter( + contest_id=contest_id, account=device.account + ).values("account_id", "steps") + if len(leaderboard) > 0: + user = leaderboard[0] + user["device_id"] = device_id + user["rank"] = Leaderboard.objects.filter( + contest_id=contest_id, + steps__gte=user["steps"], + account__is_tester=device.account.is_tester, + ).count() + leaderboard_list.append(user) + + json_response["payload"]["leaderboard"] = leaderboard_list + + return JsonResponse(json_response) diff --git a/home/views/api/schemas/account.py b/home/views/api/schemas/account.py new file mode 100644 index 00000000..d102d506 --- /dev/null +++ b/home/views/api/schemas/account.py @@ -0,0 +1,145 @@ +from ninja import ModelSchema, Schema +from pydantic import field_validator +from typing import Optional + +from home.models.account import Account, SAN_FRANCISCO_ZIP_CODES +from home.models.device import Device + + +class DeviceSchema(ModelSchema): + class Meta: + model = Device + fields = ( + "device_id", + "account", + "created" + ) + + +# class AccountSchema(ModelSchema): +# account_id: str +# # is_test: bool +# # race = list + +# class Meta: +# model = Account +# fields = ( +# "name", +# "email", +# "zip", +# "age", +# "is_latino", +# "race", +# "race_other", +# "gender", +# "gender_other", +# "sexual_orien", +# "sexual_orien_other", +# ) + + # @validator("account_id") + # def redefine_account_id(cls, v): + # return v.device_id + +class AccountSchema(Schema): + account_id: str + name: str + email: str + zip: str + age: int + is_latino: Optional[str] = None + race: Optional[list] = None + race_other: Optional[str] = None + gender: Optional[str] = None + gender_other: Optional[str] = None + sexual_orien: Optional[str] = None + sexual_orien_other: Optional[str] = None + + # Check for valid zip code + @field_validator("zip") + def check_zip(cls, zipcode): + if zipcode not in SAN_FRANCISCO_ZIP_CODES: + raise ValueError("Invalid zip code") + # assert zipcode in SAN_FRANCISCO_ZIP_CODES, "Invalid zip code" + return zipcode + + # Check for valid age range + # @validator("age") + # def valid_ + # ): # Required field but existence checked in validate_request_json + # assert data["age"] > 1 and data["age"] < 200, "Invalid age" + # if data.get("is_latino") is not None: + # is_latino = data["is_latino"] + # assert ( + # is_latino in IsLatinoLabels.__members__ + # ), f"Invalid is latino or hispanic selection '{is_latino}'" + # if data.get("race") is not None: + # for item in data["race"]: + # assert ( + # item in RaceLabels.__members__ + # ), f"Invalid race selection '{item}'" + # if "OT" in data["race"]: + # assert ( + # len(data.get("race_other", 
"")) > 0 + # ), "Must specify 'other' race" + # else: + # assert ( + # data.get("race_other") is None + # ), "'race_other' should not be specified without race 'OT'" + # elif data.get("race_other") is not None: + # assert False, "'race_other' should not be specified without 'race'" + # if data.get("gender") is not None: + # gender = data["gender"] + # assert ( + # gender in GenderLabels.__members__ + # ), f"Invalid gender selection '{gender}'" + # if data["gender"] == "OT": + # assert ( + # len(data.get("gender_other", "")) > 0 + # ), "Must specify 'other' gender" + # else: + # assert ( + # data.get("gender_other") is None + # ), "'gender_other' should not be specified without 'OT'" + # elif data.get("gender_other") is not None: + # assert False, "'gender_other' should not be specified without 'gender'" + # if data.get("sexual_orien") is not None: + # sexual_orientation = data["sexual_orien"] + # assert ( + # sexual_orientation in SexualOrientationLabels.__members__ + # ), f"Invalid sexual orientation selection '{sexual_orientation}'" + # if data["sexual_orien"] == "OT": + # assert ( + # len(data.get("sexual_orien_other", "")) > 0 + # ), "Must specify 'other' sexual orientation" + # else: + # assert ( + # data.get("sexual_orien_other") is None + # ), "'sexual_orien_other' should not be specified without 'OT'" + # elif data.get("sexual_orien_other") is not None: + # assert ( + # False + # ), "'sexual_orien_other' should not be specified without 'gender'" + + + +# class AccountOut(ModelSchema): +# class Meta: +# model = Account +# fields = ( +# "name", +# "email", +# "zip", +# "age", +# "is_latino", +# "race", +# "race_other", +# "gender", +# "gender_other", +# "sexual_orien", +# "sexual_orien_other", +# ) + + +class Error(Schema): + message: str diff --git a/home/views/api/serializers/request_serializers.py b/home/views/api/serializers/request_serializers.py index fe635224..6cdccf82 100644 --- a/home/views/api/serializers/request_serializers.py +++ b/home/views/api/serializers/request_serializers.py @@ -1,143 +1,138 @@ -""" -This module contains serializers that are used for parsing and validating request data. - -Each serializer in this module corresponds to a specific API endpoint. The serializer's -`validate` method is responsible for validating the incoming request data and preparing -it for further processing. -""" - -from rest_framework import serializers -from datetime import timedelta -from home.models import Contest -from django.db.models import ( - BooleanField, - Count, - ExpressionWrapper, - F, - Q, - Sum, -) - - -class GetUsersReqSerializer(serializers.Serializer): - contest_id = serializers.CharField( - required=False, - help_text="The ID of the contest to filter by." - + "Providing this also will add additional metrics related to te contest.", - ) - # If true, will only return tester accounts. - is_tester = serializers.BooleanField( - required=False, help_text="If true, will only return tester accounts." - ) - # Choices are: age, contests, created, dailywalk, device, email, gender, gender_other, id, - # intentionalwalk, is_latino, is_sf_resident, is_tester, iw_count, iw_distance, iw_steps, - # iw_time, leaderboard, name, race, race_other, sexual_orien, sexual_orien_other, updated, - # weeklygoal, zip. - # TODO: Can move this to the choices field tuple. - # which will allow some tools to auto-pick up. - order_by = serializers.CharField( - required=False, - help_text="The field to order the results by. Prefix with '-' to order in descending order." 
- + "The secondary sort and default sort will be lexicographically, the 'name'.", - ) - page = serializers.IntegerField( - required=False, - help_text="The page number to return. Defaults to 1.", - default=1, - ) - query = serializers.CharField( - required=False, - help_text="Query string to filter for containment in the name or email.", - ) - - def validate(self, data): - """Validates and prepares the incoming request data. - - Converts the request params into FilterSet params and annotations. - """ - contest_id = data.get("contest_id") - is_tester = data.get("is_tester") - order_by = data.get("order_by") - page = data.get("page") or 1 - per_page = 25 - query = data.get("query") - - # filter and annotate based on contest_id - filters, annotate, intentionalwalk_filter = None, None, None - if contest_id: - contest = Contest.objects.get(pk=contest_id) - dailywalk_filter = Q( - dailywalk__date__range=(contest.start, contest.end) - ) - - filters = Q(contests__contest_id=contest_id) - annotate = { - "is_new": ExpressionWrapper( - Q( - created__gte=contest.start_promo, - created__lt=contest.end + timedelta(days=1), - ), - output_field=BooleanField(), - ), - "dw_count": Count("dailywalk", filter=dailywalk_filter), - "dw_steps": Sum("dailywalk__steps", filter=dailywalk_filter), - "dw_distance": Sum( - "dailywalk__distance", filter=dailywalk_filter - ), - } - intentionalwalk_filter = Q( - intentionalwalk__start__gte=contest.start, - intentionalwalk__start__lt=contest.end + timedelta(days=1), - ) - else: - filters = Q() - annotate = { - "dw_count": Count("dailywalk"), - "dw_steps": Sum("dailywalk__steps"), - "dw_distance": Sum("dailywalk__distance"), - } - intentionalwalk_filter = Q() - - intentionalwalk_annotate = { - "iw_count": Count( - "intentionalwalk", filter=intentionalwalk_filter - ), - "iw_steps": Sum( - "intentionalwalk__steps", filter=intentionalwalk_filter - ), - "iw_distance": Sum( - "intentionalwalk__distance", filter=intentionalwalk_filter - ), - "iw_time": Sum( - "intentionalwalk__walk_time", filter=intentionalwalk_filter - ), - } - - # filter to show users vs testers - filters &= Q(is_tester=is_tester) - - # filter by search query - if query: - filters &= Q(Q(name__icontains=query) | Q(email__icontains=query)) - - # set ordering - order = [] - if order_by: - desc = order_by.startswith("-") - field = F(order_by[1:] if desc else order_by) - order.append( - field.desc(nulls_last=True) - if desc - else field.asc(nulls_first=False) - ) - order.append(F("name")) - - return { - "annotate": annotate, - "intentionalwalk_annotate": intentionalwalk_annotate, - "contest_id": contest_id, - "filters": filters, - "order_by": order, - "page": page, - "per_page": per_page, - } +""" +This module contains serializers that are used for parsing and validating request data. + +Each serializer in this module corresponds to a specific API endpoint. The serializer's +`validate` method is responsible for validating the incoming request data and preparing +it for further processing. +""" + +from datetime import timedelta + +from django.db.models import BooleanField, Count, ExpressionWrapper, F, Q, Sum +from rest_framework import serializers + +from home.models import Contest + + +class GetUsersReqSerializer(serializers.Serializer): + contest_id = serializers.CharField( + required=False, + help_text="The ID of the contest to filter by." + + "Providing this also will add additional metrics related to te contest.", + ) + # If true, will only return tester accounts. 
+ is_tester = serializers.BooleanField( + required=False, help_text="If true, will only return tester accounts." + ) + # Choices are: age, contests, created, dailywalk, device, email, gender, gender_other, id, + # intentionalwalk, is_latino, is_sf_resident, is_tester, iw_count, iw_distance, iw_steps, + # iw_time, leaderboard, name, race, race_other, sexual_orien, sexual_orien_other, updated, + # weeklygoal, zip. + # TODO: Can move this to the choices field tuple. + # which will allow some tools to auto-pick up. + order_by = serializers.CharField( + required=False, + help_text="The field to order the results by. Prefix with '-' to order in descending order." + + "The secondary sort and default sort will be lexicographically, the 'name'.", + ) + page = serializers.IntegerField( + required=False, + help_text="The page number to return. Defaults to 1.", + default=1, + ) + query = serializers.CharField( + required=False, + help_text="Query string to filter for containment in the name or email.", + ) + + def validate(self, data): + """Validates and prepares the incoming request data. + + Converts the request params into FilterSet params and annotations. + """ + contest_id = data.get("contest_id") + is_tester = data.get("is_tester") + order_by = data.get("order_by") + page = data.get("page") or 1 + per_page = 25 + query = data.get("query") + + # filter and annotate based on contest_id + filters, annotate, intentionalwalk_filter = None, None, None + if contest_id: + contest = Contest.objects.get(pk=contest_id) + dailywalk_filter = Q( + dailywalk__date__range=(contest.start, contest.end) + ) + + filters = Q(contests__contest_id=contest_id) + annotate = { + "is_new": ExpressionWrapper( + Q( + created__gte=contest.start_promo, + created__lt=contest.end + timedelta(days=1), + ), + output_field=BooleanField(), + ), + "dw_count": Count("dailywalk", filter=dailywalk_filter), + "dw_steps": Sum("dailywalk__steps", filter=dailywalk_filter), + "dw_distance": Sum( + "dailywalk__distance", filter=dailywalk_filter + ), + } + intentionalwalk_filter = Q( + intentionalwalk__start__gte=contest.start, + intentionalwalk__start__lt=contest.end + timedelta(days=1), + ) + else: + filters = Q() + annotate = { + "dw_count": Count("dailywalk"), + "dw_steps": Sum("dailywalk__steps"), + "dw_distance": Sum("dailywalk__distance"), + } + intentionalwalk_filter = Q() + + intentionalwalk_annotate = { + "iw_count": Count( + "intentionalwalk", filter=intentionalwalk_filter + ), + "iw_steps": Sum( + "intentionalwalk__steps", filter=intentionalwalk_filter + ), + "iw_distance": Sum( + "intentionalwalk__distance", filter=intentionalwalk_filter + ), + "iw_time": Sum( + "intentionalwalk__walk_time", filter=intentionalwalk_filter + ), + } + + # filter to show users vs testers + filters &= Q(is_tester=is_tester) + + # filter by search query + if query: + filters &= Q(Q(name__icontains=query) | Q(email__icontains=query)) + + # set ordering + order = [] + if order_by: + desc = order_by.startswith("-") + field = F(order_by[1:] if desc else order_by) + order.append( + field.desc(nulls_last=True) + if desc + else field.asc(nulls_first=False) + ) + order.append(F("name")) + + return { + "annotate": annotate, + "intentionalwalk_annotate": intentionalwalk_annotate, + "contest_id": contest_id, + "filters": filters, + "order_by": order, + "page": page, + "per_page": per_page, + } diff --git a/home/views/api/serializers/response_serializers.py b/home/views/api/serializers/response_serializers.py index ee1c6780..e4ecfbd8 100644 --- 
a/home/views/api/serializers/response_serializers.py +++ b/home/views/api/serializers/response_serializers.py @@ -1,37 +1,37 @@ -""" -This module contains serializers that are used for formatting the response data. - -Each serializer in this module corresponds to a specific API endpoint. -These serve to map internal Python data types -to JSON-compatible data types that can be sent in the HTTP response, -and to clearly document the structure of the response data. -""" -from rest_framework import serializers -from home.models import Account - - -class GetUsersRespSerializer(serializers.ModelSerializer): - # Daily walk metrics. - dw_count = serializers.IntegerField() - dw_steps = serializers.IntegerField() - dw_distance = serializers.FloatField() - - # Contest-id specific metrics. These only appear if contest_id - # was specified in the query URL. - iw_count = serializers.IntegerField(required=False) - iw_steps = serializers.IntegerField(required=False) - iw_distance = serializers.FloatField(required=False) - iw_time = serializers.IntegerField(required=False) - - # True if the user's Account was created within the contest period. - is_new = serializers.BooleanField( - required=False, - ) - # True is the user has walked at least one step. - is_active = serializers.BooleanField( - required=False, - ) - - class Meta: - model = Account - fields = "__all__" +""" +This module contains serializers that are used for formatting the response data. + +Each serializer in this module corresponds to a specific API endpoint. +These serve to map internal Python data types +to JSON-compatible data types that can be sent in the HTTP response, +and to clearly document the structure of the response data. +""" +from rest_framework import serializers +from home.models import Account + + +class GetUsersRespSerializer(serializers.ModelSerializer): + # Daily walk metrics. + dw_count = serializers.IntegerField() + dw_steps = serializers.IntegerField() + dw_distance = serializers.FloatField() + + # Contest-id specific metrics. These only appear if contest_id + # was specified in the query URL. + iw_count = serializers.IntegerField(required=False) + iw_steps = serializers.IntegerField(required=False) + iw_distance = serializers.FloatField(required=False) + iw_time = serializers.IntegerField(required=False) + + # True if the user's Account was created within the contest period. + is_new = serializers.BooleanField( + required=False, + ) + # True is the user has walked at least one step. 
+ is_active = serializers.BooleanField( + required=False, + ) + + class Meta: + model = Account + fields = "__all__" diff --git a/home/views/api/utils.py b/home/views/api/utils.py index 8a667c2f..fb60b6b7 100644 --- a/home/views/api/utils.py +++ b/home/views/api/utils.py @@ -1,89 +1,89 @@ -import functools -from math import ceil -from typing import Any, Dict, List, Callable -from django.http import HttpResponse -from django.views import View - - -def paginate(request, results, page, per_page): - count = results.count() - pages_count = ceil(count / per_page) - base_url = f"{request.scheme}://{request.get_host()}{request.path}" - query = request.GET.copy() - links = [] - if page < pages_count: - query["page"] = page + 1 - links.append(f'<{base_url}?{query.urlencode()}>; rel="next"') - if page < pages_count - 1: - query["page"] = pages_count - links.append(f'<{base_url}?{query.urlencode()}>; rel="last"') - if page > 2: - query["page"] = 1 - links.append(f'<{base_url}?{query.urlencode()}>; rel="first"') - if page > 1: - query["page"] = page - 1 - links.append(f'<{base_url}?{query.urlencode()}>; rel="prev"') - return ( - results[(page - 1) * per_page : page * per_page], # noqa: E203 - ", ".join(links), - ) - - -def validate_request_json( - json_data: Dict[str, Any], required_fields: List[str] -) -> Dict[str, str]: - """Generic function to check the request json payload for required fields - and create an error response if missing - - Parameters - ---------- - json_data - Input request json converted to a python dict - required_fields - Fields required in the input json - - Returns - ------- - Dictionary with a boolean indicating if the input json is validated and - an optional error message - - """ - # Create a default success message - response = {"status": "success"} - for required_field in required_fields: - if required_field not in json_data: - # Set the error fields - response["status"] = "error" - response[ - "message" - ] = f"Required input '{required_field}' missing in the request" - # Fail on the first missing key - break - - return response - - -def require_authn(func: Callable[[View, Any, Any], HttpResponse]): - """Decorator for Django View methods to require authn. - - Checks if the request's user is authenticated. If not, returns a 401 HttpResponse. - Otherwise, calls the decorated method. - - Parameters - ---------- - func: - The View method to decorate. - - Returns - ------- - The decorated method. 
- - """ - - @functools.wraps(func) - def wrapper(self, *args, **kwargs): - if not self.request.user.is_authenticated: - return HttpResponse(status=401) - return func(self, *args, **kwargs) - - return wrapper +import functools +from math import ceil +from typing import Any, Dict, List, Callable +from django.http import HttpResponse +from django.views import View + + +def paginate(request, results, page, per_page): + count = results.count() + pages_count = ceil(count / per_page) + base_url = f"{request.scheme}://{request.get_host()}{request.path}" + query = request.GET.copy() + links = [] + if page < pages_count: + query["page"] = page + 1 + links.append(f'<{base_url}?{query.urlencode()}>; rel="next"') + if page < pages_count - 1: + query["page"] = pages_count + links.append(f'<{base_url}?{query.urlencode()}>; rel="last"') + if page > 2: + query["page"] = 1 + links.append(f'<{base_url}?{query.urlencode()}>; rel="first"') + if page > 1: + query["page"] = page - 1 + links.append(f'<{base_url}?{query.urlencode()}>; rel="prev"') + return ( + results[(page - 1) * per_page : page * per_page], # noqa: E203 + ", ".join(links), + ) + + +def validate_request_json( + json_data: Dict[str, Any], required_fields: List[str] +) -> Dict[str, str]: + """Generic function to check the request json payload for required fields + and create an error response if missing + + Parameters + ---------- + json_data + Input request json converted to a python dict + required_fields + Fields required in the input json + + Returns + ------- + Dictionary with a boolean indicating if the input json is validated and + an optional error message + + """ + # Create a default success message + response = {"status": "success"} + for required_field in required_fields: + if required_field not in json_data: + # Set the error fields + response["status"] = "error" + response[ + "message" + ] = f"Required input '{required_field}' missing in the request" + # Fail on the first missing key + break + + return response + + +def require_authn(func: Callable[[View, Any, Any], HttpResponse]): + """Decorator for Django View methods to require authn. + + Checks if the request's user is authenticated. If not, returns a 401 HttpResponse. + Otherwise, calls the decorated method. + + Parameters + ---------- + func: + The View method to decorate. + + Returns + ------- + The decorated method. 
+ + """ + + @functools.wraps(func) + def wrapper(self, *args, **kwargs): + if not self.request.user.is_authenticated: + return HttpResponse(status=401) + return func(self, *args, **kwargs) + + return wrapper diff --git a/home/views/api/weeklygoal.py b/home/views/api/weeklygoal.py index f572aa7c..f1d838fa 100644 --- a/home/views/api/weeklygoal.py +++ b/home/views/api/weeklygoal.py @@ -1,174 +1,174 @@ -import json -import logging -from datetime import datetime - -from django.core.exceptions import ObjectDoesNotExist -from django.http import JsonResponse -from django.utils.decorators import method_decorator -from django.views import View -from django.views.decorators.csrf import csrf_exempt - -from home.models import WeeklyGoal, Device -from home.utils.dates import get_start_of_week, DATE_FORMAT - - -from .utils import validate_request_json - -logger = logging.getLogger(__name__) - - -# Exempt from csrf validation -@method_decorator(csrf_exempt, name="dispatch") -class WeeklyGoalCreateView(View): - """View to create or update a weeklygoal for an account""" - - model = WeeklyGoal - http_method_names = ["post"] - - def post(self, request): - json_data = json.loads(request.body) - - # Validate json. If any field is missing, send back the response message - json_status = validate_request_json( - json_data, - required_fields=["account_id", "weekly_goal"], - ) - if "status" in json_status and json_status["status"] == "error": - return JsonResponse(json_status) - - # Validate weekly_goal json fields - json_status = validate_request_json( - json_data["weekly_goal"], - required_fields=["start_of_week", "steps", "days"], - ) - - if "status" in json_status and json_status["status"] == "error": - return JsonResponse(json_status) - - # Get the device - try: - device = Device.objects.get(device_id=json_data["account_id"]) - account = device.account - except ObjectDoesNotExist: - return JsonResponse( - { - "status": "error", - "message": ( - "Unregistered device - " - f"{json_data['account_id']}." - " Please register first!" - ), - } - ) - - # Json response template - json_response = { - "status": "success", - "message": "WeeklyGoal saved successfully", - "payload": { - "account_id": account.id, - "weekly_goal": {}, - }, - } - - weekly_goal_update = json_data["weekly_goal"] - - start_of_week = weekly_goal_update["start_of_week"] - start_of_week_update = get_start_of_week( - datetime.strptime(start_of_week, DATE_FORMAT).date() - ) - steps_update = weekly_goal_update["steps"] - days_update = weekly_goal_update["days"] - - # Check if there's already a goal for the week. If there is, - # update the entry. 
- try: - weekly_goal = WeeklyGoal.objects.get( - account=account, - start_of_week=start_of_week_update, - ) - weekly_goal.steps = steps_update - weekly_goal.days = days_update - weekly_goal.save() - except ObjectDoesNotExist: - # Creation if object is missing - weekly_goal = WeeklyGoal.objects.create( - start_of_week=start_of_week_update, - steps=steps_update, - days=days_update, - account=account, - ) - - # Update the json object - json_response["payload"]["weekly_goal"] = { - "start_of_week": weekly_goal.start_of_week, - "steps": weekly_goal.steps, - "days": weekly_goal.days, - } - - return JsonResponse(json_response) - - def http_method_not_allowed(self, request): - return JsonResponse( - {"status": "error", "message": "Method not allowed!"} - ) - - -@method_decorator(csrf_exempt, name="dispatch") -class WeeklyGoalsListView(View): - """View to retrieve Weekly Goals""" - - model = WeeklyGoal - http_method_name = ["post"] - - def post(self, request): - json_data = json.loads(request.body) - - # Validate json. If any field is missing, send back the response message - json_status = validate_request_json( - json_data, required_fields=["account_id"] - ) - - if "status" in json_status and json_status["status"] == "error": - return JsonResponse(json_status) - - # Get the account - try: - device = Device.objects.get(device_id=json_data["account_id"]) - account = device.account - except ObjectDoesNotExist: - return JsonResponse( - { - "status": "error", - "message": ( - "Unregistered account - " - f"{json_data['account_id']}." - " Please register first!" - ), - } - ) - - # Json response template - json_response = { - "status": "success", - "message": "Weekly goals accessed successfully", - "payload": {}, - } - - # Get weekly goals tied to this account - weekly_goals = list( - WeeklyGoal.objects.filter(account=account).values() - ) - """ for goal in weekly_goals: - goal = model_to_dict(goal) """ - for goal in weekly_goals: - goal["start_of_week"] = goal["start_of_week"].strftime(DATE_FORMAT) - - json_response["payload"] = weekly_goals - - return JsonResponse(json_response) - - def http_method_not_allowed(self, request): - return JsonResponse( - {"status": "error", "message": "Method not allowed!"} - ) +import json +import logging +from datetime import datetime + +from django.core.exceptions import ObjectDoesNotExist +from django.http import JsonResponse +from django.utils.decorators import method_decorator +from django.views import View +from django.views.decorators.csrf import csrf_exempt + +from home.models import WeeklyGoal, Device +from home.utils.dates import get_start_of_week, DATE_FORMAT + + +from .utils import validate_request_json + +logger = logging.getLogger(__name__) + + +# Exempt from csrf validation +@method_decorator(csrf_exempt, name="dispatch") +class WeeklyGoalCreateView(View): + """View to create or update a weeklygoal for an account""" + + model = WeeklyGoal + http_method_names = ["post"] + + def post(self, request): + json_data = json.loads(request.body) + + # Validate json. 
If any field is missing, send back the response message + json_status = validate_request_json( + json_data, + required_fields=["account_id", "weekly_goal"], + ) + if "status" in json_status and json_status["status"] == "error": + return JsonResponse(json_status) + + # Validate weekly_goal json fields + json_status = validate_request_json( + json_data["weekly_goal"], + required_fields=["start_of_week", "steps", "days"], + ) + + if "status" in json_status and json_status["status"] == "error": + return JsonResponse(json_status) + + # Get the device + try: + device = Device.objects.get(device_id=json_data["account_id"]) + account = device.account + except ObjectDoesNotExist: + return JsonResponse( + { + "status": "error", + "message": ( + "Unregistered device - " + f"{json_data['account_id']}." + " Please register first!" + ), + } + ) + + # Json response template + json_response = { + "status": "success", + "message": "WeeklyGoal saved successfully", + "payload": { + "account_id": account.id, + "weekly_goal": {}, + }, + } + + weekly_goal_update = json_data["weekly_goal"] + + start_of_week = weekly_goal_update["start_of_week"] + start_of_week_update = get_start_of_week( + datetime.strptime(start_of_week, DATE_FORMAT).date() + ) + steps_update = weekly_goal_update["steps"] + days_update = weekly_goal_update["days"] + + # Check if there's already a goal for the week. If there is, + # update the entry. + try: + weekly_goal = WeeklyGoal.objects.get( + account=account, + start_of_week=start_of_week_update, + ) + weekly_goal.steps = steps_update + weekly_goal.days = days_update + weekly_goal.save() + except ObjectDoesNotExist: + # Creation if object is missing + weekly_goal = WeeklyGoal.objects.create( + start_of_week=start_of_week_update, + steps=steps_update, + days=days_update, + account=account, + ) + + # Update the json object + json_response["payload"]["weekly_goal"] = { + "start_of_week": weekly_goal.start_of_week, + "steps": weekly_goal.steps, + "days": weekly_goal.days, + } + + return JsonResponse(json_response) + + def http_method_not_allowed(self, request): + return JsonResponse( + {"status": "error", "message": "Method not allowed!"} + ) + + +@method_decorator(csrf_exempt, name="dispatch") +class WeeklyGoalsListView(View): + """View to retrieve Weekly Goals""" + + model = WeeklyGoal + http_method_name = ["post"] + + def post(self, request): + json_data = json.loads(request.body) + + # Validate json. If any field is missing, send back the response message + json_status = validate_request_json( + json_data, required_fields=["account_id"] + ) + + if "status" in json_status and json_status["status"] == "error": + return JsonResponse(json_status) + + # Get the account + try: + device = Device.objects.get(device_id=json_data["account_id"]) + account = device.account + except ObjectDoesNotExist: + return JsonResponse( + { + "status": "error", + "message": ( + "Unregistered account - " + f"{json_data['account_id']}." + " Please register first!" 
+ ), + } + ) + + # Json response template + json_response = { + "status": "success", + "message": "Weekly goals accessed successfully", + "payload": {}, + } + + # Get weekly goals tied to this account + weekly_goals = list( + WeeklyGoal.objects.filter(account=account).values() + ) + """ for goal in weekly_goals: + goal = model_to_dict(goal) """ + for goal in weekly_goals: + goal["start_of_week"] = goal["start_of_week"].strftime(DATE_FORMAT) + + json_response["payload"] = weekly_goals + + return JsonResponse(json_response) + + def http_method_not_allowed(self, request): + return JsonResponse( + {"status": "error", "message": "Method not allowed!"} + ) diff --git a/home/views/apiv2/admin.py b/home/views/apiv2/admin.py new file mode 100644 index 00000000..19feed23 --- /dev/null +++ b/home/views/apiv2/admin.py @@ -0,0 +1,621 @@ +import logging +import os +from datetime import timedelta +from typing import List + +from dateutil import parser +from django.db import connection +from django.db.models import CharField, Count, Q, Sum, Value +from django.db.models.functions import Concat, TruncDate +from django.views.decorators.csrf import csrf_exempt +from ninja import Query, Router +from ninja.errors import HttpError, ValidationError +from ninja.security import django_auth_superuser + +from home.models import Account, Contest, DailyWalk + +from .histogram.histogram import Histogram +from .schemas.admin import ( + AdminHomeSchema, + AdminMeSchema, + ContestOutSchema, + ErrorSchema, + HistogramInSchema, + HistogramOutSchema, + HomeGraphFilter, + UsersByZipInSchema, + UsersByZipOutSchema, +) + +logger = logging.getLogger(__name__) +router = Router() + + +@router.get("/me", response={200: AdminMeSchema, 204: None}) +@csrf_exempt +def get_admin_me(request): + if request.user.is_authenticated: + return { + "id": request.user.id, + "username": request.user.username, + "first_name": request.user.first_name, + "last_name": request.user.last_name, + "email": request.user.email, + } + else: + return 204, None + + +@router.get( + "/home", + response={200: AdminHomeSchema}, + auth=django_auth_superuser, +) +@csrf_exempt +def get_admin_home(request): + filters = {"is_tester": False} + results = Account.objects.filter(**filters).aggregate( + Sum("dailywalk__steps"), + Sum("dailywalk__distance"), + ) + payload = { + "accounts_count": Account.objects.filter(**filters).count(), + "accounts_steps": results["dailywalk__steps__sum"], + "accounts_distance": results["dailywalk__distance__sum"], + } + return payload + + +def get_contest_start_end(qs: HomeGraphFilter) -> tuple[str, str]: + # handle common parameters for all the chart data API endpoints + if qs.contest_id: + try: + contest = Contest.objects.get(pk=qs.contest_id) + start_date = min( + contest.start_baseline, contest.start_promo + ).isoformat() + end_date = contest.end.isoformat() + except Contest.DoesNotExist: + raise HttpError( + 404, f"Cannot find contest with contest_id {qs.contest_id}" + ) + else: + start_date, end_date = None, None + + return start_date, end_date + + +def process_results( + results: list, + start_date: str | None, + end_date: str | None, + is_cumulative: bool = False, +) -> list: + # handle common result processing for the chart data + if len(results) > 0: + if start_date and results[0][0] != f"{start_date}T00:00:00": + results.insert(0, [f"{start_date}T00:00:00", 0]) + if end_date and results[-1][0] != f"{end_date}T00:00:00": + if is_cumulative(): + results.append([f"{end_date}T00:00:00", results[-1][1]]) + else: + 
results.append([f"{end_date}T00:00:00", 0]) + else: + results.append([start_date, 0]) + results.append([end_date, 0]) + results.insert(0, ["Date", "Count"]) + + return results + + +@router.get( + "/home/users/daily", + response={200: List, 404: ErrorSchema}, + auth=django_auth_superuser, +) +def get_users_daily(request, qs: Query[HomeGraphFilter]): + start_date, end_date = get_contest_start_end(qs) + filters = Q() + # filter to show users vs testers + filters = filters & Q(is_tester=qs.is_tester) + # filter by date + if start_date: + filters = filters & Q(created__gte=start_date) + if end_date: + filters = filters & Q( + created__lt=parser.parse(end_date) + timedelta(days=1) + ) + results = ( + Account.objects.filter(filters) + .annotate( + date=Concat( + TruncDate("created"), + Value("T00:00:00"), + output_field=CharField(), + ) + ) + .values("date") + .annotate(count=Count("id")) + .order_by("date") + ) + results = [[row["date"], row["count"]] for row in results] + + results = process_results( + results, start_date, end_date, is_cumulative=False + ) + + return results + + +@router.get( + "/home/users/cumulative", + response={200: List, 404: ErrorSchema}, + auth=django_auth_superuser, +) +def get_users_cumulative(request, qs: Query[HomeGraphFilter]): + start_date, end_date = get_contest_start_end(qs) + conditions = """ + "is_tester"=%s + """ + params = [qs.is_tester] + if start_date: + conditions = f"""{conditions} AND + "created" >= %s + """ + params.append(start_date) + if end_date: + conditions = f"""{conditions} AND + "created" < %s + """ + params.append(parser.parse(end_date) + timedelta(days=1)) + + with connection.cursor() as cursor: + cursor.execute( + f""" + SELECT "date", (SUM("count") OVER (ORDER BY "date"))::int AS "count" + FROM + (SELECT + CONCAT(("created" AT TIME ZONE '{os.getenv("TIME_ZONE", "America/Los_Angeles")}')::date, + 'T00:00:00') AS "date", + COUNT("id") AS "count" + FROM "home_account" + WHERE {conditions} + GROUP BY "date") subquery + ORDER BY "date" + """, + params, + ) + results = cursor.fetchall() + + results = process_results( + results, start_date, end_date, is_cumulative=True + ) + + return results + + +def get_results_walks_daily( + start_date: str = None, + end_date: str = None, + is_tester: bool = False, + value_type=None, +): + filters = Q() + # filter to show users vs testers + filters = filters & Q(account__is_tester=is_tester) + # filter by date + if start_date: + filters = filters & Q(date__gte=start_date) + if end_date: + filters = filters & Q(date__lte=end_date) + results = ( + DailyWalk.objects.filter(filters) + .annotate( + date_time=Concat( + "date", + Value("T00:00:00"), + output_field=CharField(), + ), + ) + .values("date_time") + .annotate( + count=Sum(value_type), + ) + .order_by("date_time") + ) + results = [[row["date_time"], row["count"]] for row in results] + + return results + + +@router.get( + "/home/steps/daily", + response={200: List, 404: ErrorSchema}, + auth=django_auth_superuser, +) +def get_walks_steps_daily(request, qs: Query[HomeGraphFilter]): + start_date, end_date = get_contest_start_end(qs) + results = get_results_walks_daily( + start_date, end_date, qs.is_tester, "steps" + ) + results = process_results( + results, start_date, end_date, is_cumulative=False + ) + + return results + + +@router.get( + "/home/distance/daily", + response={200: List, 404: ErrorSchema}, + auth=django_auth_superuser, +) +def get_walks_distance_daily(request, qs: Query[HomeGraphFilter]): + start_date, end_date = get_contest_start_end(qs) + results 
= get_results_walks_daily( + start_date, end_date, qs.is_tester, "distance" + ) + results = process_results( + results, start_date, end_date, is_cumulative=False + ) + + return results + + +def get_results_walks_cumulative( + start_date: str = None, + end_date: str = None, + is_tester: bool = False, + value_type=None, +): + + conditions = """ + "home_account"."is_tester"=%s + """ + params = [is_tester] + if start_date: + conditions = f"""{conditions} AND + "home_dailywalk"."date" >= %s + """ + params.append(start_date) + if end_date: + conditions = f"""{conditions} AND + "home_dailywalk"."date" <= %s + """ + params.append(end_date) + + with connection.cursor() as cursor: + cursor.execute( + f""" + SELECT "date", (SUM("count") OVER (ORDER BY "date"))::int AS "count" + FROM + (SELECT + CONCAT("date", 'T00:00:00') AS "date", + SUM("{value_type}") AS "count" + FROM "home_dailywalk" + JOIN "home_account" ON "home_account"."id"="home_dailywalk"."account_id" + WHERE {conditions} + GROUP BY "date") subquery + ORDER BY "date" + """, + params, + ) + results = cursor.fetchall() + + return results + + +@router.get( + "/home/steps/cumulative", + response={200: List, 404: ErrorSchema}, + auth=django_auth_superuser, +) +def get_walks_steps_daily(request, qs: Query[HomeGraphFilter]): + start_date, end_date = get_contest_start_end(qs) + results = get_results_walks_cumulative( + start_date, + end_date, + is_tester=qs.is_tester, + value_type="steps", + ) + results = process_results(results, start_date, end_date) + + return results + + +@router.get( + "/home/distance/cumulative", + response={200: List, 404: ErrorSchema}, + auth=django_auth_superuser, +) +def get_walks_steps_daily(request, qs: Query[HomeGraphFilter]): + start_date, end_date = get_contest_start_end(qs) + results = get_results_walks_cumulative( + start_date, + end_date, + is_tester=qs.is_tester, + value_type="distance", + ) + results = process_results( + results, start_date, end_date, is_cumulative=True + ) + + return results + + +@router.get( + "/contests", + response={200: List[ContestOutSchema]}, + auth=django_auth_superuser, +) +def get_contests(request): + values = ["contest_id", "start", "end"] + order_by = ["-start"] + results = Contest.objects.values(*values).order_by(*order_by) + + return list(results) + + +@router.get( + "/users/zip", + response={200: UsersByZipOutSchema}, + exclude_none=True, + auth=django_auth_superuser, +) +def get_users_by_zip(request, qs: Query[UsersByZipInSchema]): + values = ["zip"] + order_by = ["zip"] + # if request.user.is_authenticated: + payload = {} + # filter and annotate based on contest_id + filters = None + annotate = { + "count": Count("zip"), + } + contest_id = qs.contest_id + if qs.contest_id: + filters = Q(contests__contest_id=qs.contest_id) + else: + filters = Q() + + # filter to show users vs testers + filters = filters & Q(is_tester=qs.is_tester) + + # query for totals + results = ( + Account.objects.filter(filters) + .values(*values) + .annotate(**annotate) + .order_by(*order_by) + ) + payload["total"] = {r["zip"]: r["count"] for r in results} + + # now query for new if for contest + if contest_id: + contest = Contest.objects.get(pk=contest_id) + filters = filters & Q( + created__gte=contest.start_promo, + created__lt=contest.end + timedelta(days=1), + ) + results = ( + Account.objects.filter(filters) + .values(*values) + .annotate(**annotate) + .order_by(*order_by) + ) + payload["new"] = {r["zip"]: r["count"] for r in results} + + return payload + + +@router.get( + "users/zip/active", + 
response={200: UsersByZipOutSchema, 404: ErrorSchema}, + auth=django_auth_superuser, +) +def get_users_zip_active(request, qs: Query[UsersByZipInSchema]): + contest_id = qs.contest_id + is_tester = qs.is_tester + payload = {} + try: + contest = Contest.objects.get(pk=contest_id) + except Contest.DoesNotExist: + raise HttpError(404, f"Cannot find contest_id {contest_id}") + + with connection.cursor() as cursor: + cursor.execute( + """ + SELECT zip, COUNT(*) + FROM ( + SELECT DISTINCT(home_account.id), home_account.zip + FROM home_account + JOIN home_account_contests ON home_account.id=home_account_contests.account_id + LEFT JOIN home_dailywalk ON home_account.id=home_dailywalk.account_id + LEFT JOIN home_intentionalwalk ON home_account.id=home_intentionalwalk.account_id + WHERE home_account.is_tester=%s AND + home_account_contests.contest_id=%s AND + ((home_dailywalk.id IS NOT NULL AND home_dailywalk.date BETWEEN %s AND %s) OR + (home_intentionalwalk.id IS NOT NULL AND + home_intentionalwalk.start >= %s AND home_intentionalwalk.start < %s)) + ) subquery + GROUP BY zip + """, + [ + is_tester, + contest_id, + contest.start, + contest.end, + contest.start, + contest.end + timedelta(days=1), + ], + ) + rows = cursor.fetchall() + payload["total"] = {row[0]: row[1] for row in rows} + cursor.execute( + """ + SELECT zip, COUNT(*) + FROM ( + SELECT DISTINCT(home_account.id), home_account.zip + FROM home_account + JOIN home_account_contests ON home_account.id=home_account_contests.account_id + LEFT JOIN home_dailywalk ON home_account.id=home_dailywalk.account_id + LEFT JOIN home_intentionalwalk ON home_account.id=home_intentionalwalk.account_id + WHERE home_account.is_tester=%s AND + home_account_contests.contest_id=%s AND + home_account.created >= %s AND home_account.created < %s AND + ((home_dailywalk.id IS NOT NULL AND home_dailywalk.date BETWEEN %s AND %s) OR + (home_intentionalwalk.id IS NOT NULL AND + home_intentionalwalk.start >= %s AND home_intentionalwalk.start < %s)) + ) subquery + GROUP BY zip + """, + [ + is_tester, + contest_id, + contest.start_promo, + contest.end + timedelta(days=1), + contest.start, + contest.end, + contest.start, + contest.end + timedelta(days=1), + ], + ) + rows = cursor.fetchall() + payload["new"] = {row[0]: row[1] for row in rows} + + return payload + + +@router.get( + "users/zip/steps", + response={200: dict[str, float | None], 404: ErrorSchema}, + exclude_none=False, + auth=django_auth_superuser, +) +def get_users_by_zip_median(request, qs: Query[UsersByZipInSchema]): + contest_id = qs.contest_id + is_tester = qs.is_tester + payload = {} + try: + contest = Contest.objects.get(pk=contest_id) + except Contest.DoesNotExist: + raise HttpError(404, f"Cannot find contest_id {contest_id}") + + with connection.cursor() as cursor: + cursor.execute( + """ + SELECT PERCENTILE_CONT(0.5) WITHIN GROUP(ORDER BY sum) + FROM ( + SELECT home_account.id AS id, SUM(home_dailywalk.steps) AS sum + FROM home_account + JOIN home_dailywalk ON home_account.id=home_dailywalk.account_id + JOIN home_account_contests ON home_account.id=home_account_contests.account_id + WHERE home_account.is_tester=%s AND + home_account_contests.contest_id=%s AND + home_dailywalk.date BETWEEN %s AND %s + GROUP BY (home_account.id) + ) subquery + """, + [is_tester, contest_id, contest.start, contest.end], + ) + row = cursor.fetchone() + + payload["all"] = row[0] + cursor.execute( + """ + SELECT zip, PERCENTILE_CONT(0.5) WITHIN GROUP(ORDER BY sum) + FROM ( + SELECT home_account.id AS id, home_account.zip AS 
zip, SUM(home_dailywalk.steps) AS sum + FROM home_account + JOIN home_dailywalk ON home_account.id=home_dailywalk.account_id + JOIN home_account_contests ON home_account.id=home_account_contests.account_id + WHERE home_account.is_tester=%s AND + home_account_contests.contest_id=%s AND + home_dailywalk.date BETWEEN %s AND %s + GROUP BY (home_account.id, home_account.zip) + ) subquery + GROUP BY zip + """, + [is_tester, contest_id, contest.start, contest.end], + ) + rows = cursor.fetchall() + for row in rows: + payload[row[0]] = row[1] + + return payload + + +@router.get( + "{model}/histogram", + response={200: HistogramOutSchema, 404: ErrorSchema, 422: ErrorSchema}, + exclude_none=True, + auth=django_auth_superuser, +) +def get_model_histogram( + request, + model: str, + qs: Query[HistogramInSchema], +): + histogram = Histogram(model) + if model not in histogram.supported_models: + raise HttpError( + 404, f"Invalid model and/or {model} does not support histograms" + ) + model = histogram.supported_models[model] + + valid_fields = histogram.supported_fields.get(model, []) + if qs.field not in valid_fields: + raise ValidationError( + { + "non_field_errors": f"{qs.field} is not supported for {model}. Please use one of {valid_fields}." + } + ) + + histogram.set_unit(histogram.field_units.get(qs.field)) + contest = None + if qs.contest_id: + try: + contest = Contest.objects.get(contest_id=qs.contest_id) + except Contest.DoesNotExist: + raise HttpError( + 404, f"Contest with id {qs.contest_id} does not exist." + ) + histogram.set_unit(histogram.field_units.get(qs.field)) + + res = histogram.create_bin_query( + model=model, + field=qs.field, + is_tester=qs.is_tester, + contest=contest, + start_date=qs.start_date, + end_date=qs.end_date, + bin_count=qs.bin_count, + bin_size=qs.bin_size, + bin_custom=qs.bin_custom, + ) + # even bins either specified by bin_size or bin_size computed from bin_count + if res.get("bin_size"): + return 200, { + "data": list( + histogram.fill_missing_bin_idx( + query_set=res["query_set"], + bin_size=res["bin_size"], + total_bin_ct=res["bin_count"], + ) + ), + "unit": res["unit"], + "bin_size": res["bin_size"], + } + + # custom bins + return 200, { + "data": list( + histogram.fill_missing_bin_idx( + query_set=res["query_set"], + bin_custom=res["bin_custom"], + total_bin_ct=res["bin_count"], + ) + ), + "unit": res["unit"], + "bin_custom": res["bin_custom"], + } diff --git a/home/views/apiv2/appuser.py b/home/views/apiv2/appuser.py new file mode 100644 index 00000000..67d93f57 --- /dev/null +++ b/home/views/apiv2/appuser.py @@ -0,0 +1,135 @@ +from django.views.decorators.csrf import csrf_exempt +from ninja import Router +from ninja.errors import HttpError + +from home.models import Account, Device +from home.models.account import SAN_FRANCISCO_ZIP_CODES +from home.views.apiv2.schemas.account import AccountPatchSchema, AccountSchema + +router = Router() + + +# Determines whether Account is tester account, based on name prefix +def is_tester(name_field: str) -> bool: + possible_prefixes = ["tester-", "tester ", "tester_"] + return any( + [name_field.lower().startswith(prefix) for prefix in possible_prefixes] + ) + + +def update_model(account: Account, json_data: dict): + # Data fields vary based on registration screen + for attr, value in json_data.items(): + if attr != "email": + setattr(account, attr, value) + account.save() + + +@router.post("", response={201: AccountSchema}) +@csrf_exempt +def create_appuser(request, payload: AccountSchema): + # Parse the body json + 
json_data = payload.dict() + + # Update user attributes if the object exists, else create. + # EMAIL CANNOT BE UPDATED! + # NOTE: Ideally, email ids should be validated by actually sending + # emails. Currently, accidental/intentional use of the same email id + # will share data across devices. In such an instance, a new account + # with the correct email must be created to separate data. Otherwise, + # attribution will be to the account email created first. + + # For a participating device + try: + # NOTE: Account id here maps to a device id. Perhaps the API + # definition could be changed in the future. + # Get the registered device if it exists + device = Device.objects.get(device_id=json_data["account_id"]) + # If it is an email update fail and return + if device.account.email.lower() != json_data["email"].lower(): + raise HttpError(400, "Email cannot be updated. Contact admin") + + # Otherwise, update the account's other details + account = Account.objects.get(email__iexact=json_data["email"]) + update_model(account, json_data) + # return 201, {"account_id": device.device_id, **account.__dict__} + return 201, payload + # This implies that it is a new device + except Device.DoesNotExist: + # Check if the user account exists. If not, create it + try: + account = Account.objects.get(email__iexact=json_data["email"]) + update_model(account, json_data) + except Account.DoesNotExist: + # Partially create account first, with required fields + account = Account.objects.create( + email=json_data["email"], + name=json_data["name"], + zip=json_data["zip"], + age=json_data["age"], + is_tester=is_tester(json_data["name"]), + is_sf_resident=json_data["zip"] in SAN_FRANCISCO_ZIP_CODES, + ) + + # Create a new device object and link it to the account + device = Device.objects.create( + device_id=json_data["account_id"], account=account + ) + + return 201, payload + + +@router.put("/{account_id}", response={204: None}) +@csrf_exempt +def update_appuser(request, account_id: str, payload: AccountSchema): + json_data = payload.dict() + try: + device = Device.objects.get(device_id=account_id) + # If it is an email update, fail and return + if device.account.email.lower() != json_data["email"].lower(): + raise HttpError(400, "Email cannot be updated. Contact admin") + except Device.DoesNotExist: + raise HttpError( + 404, f"Cannot find device registered with account_id: {account_id}" + ) + account = Account.objects.get(email__iexact=device.account.email) + update_model(account, json_data) + + return 204, None + + +@router.patch("/{account_id}", response={204: None}) +@csrf_exempt +def update_appuser(request, account_id: str, payload: AccountPatchSchema): + json_data = payload.dict() + try: + device = Device.objects.get(device_id=account_id) + # If it is an email update, fail and return + if ( + json_data["email"] + and device.account.email.lower() != json_data["email"].lower() + ): + raise HttpError(400, "Email cannot be updated. 
Contact admin") + except Device.DoesNotExist: + raise HttpError( + 404, f"Cannot find device registered with account_id: {account_id}" + ) + account = Account.objects.get(email__iexact=device.account.email) + update_model(account, payload.dict(exclude_unset=True)) + + return 204, None + + +@router.delete("/{account_id}", response={204: None}) +@csrf_exempt +def delete_appuser(request, account_id: str): + try: + device = Device.objects.get(device_id=account_id) + except Device.DoesNotExist: + raise HttpError( + 404, f"Cannot find device registered with account_id: {account_id}" + ) + device.account.delete() + device.delete() + + return 204, None diff --git a/home/views/apiv2/contest.py b/home/views/apiv2/contest.py new file mode 100644 index 00000000..7e13cf13 --- /dev/null +++ b/home/views/apiv2/contest.py @@ -0,0 +1,18 @@ +from django.views.decorators.csrf import csrf_exempt +from ninja import Router +from ninja.errors import HttpError + +from home.models import Contest +from home.views.apiv2.schemas.contest import ContestSchema + +router = Router() + + +@router.get("/current", response={200: ContestSchema}) +@csrf_exempt +def get_curent_contest(request): + # get the current/next Contest + contest = Contest.active() + if contest is None: + raise HttpError(404, "There are no contests") + return 200, contest diff --git a/home/views/apiv2/dailywalk.py b/home/views/apiv2/dailywalk.py new file mode 100644 index 00000000..72ed5a3f --- /dev/null +++ b/home/views/apiv2/dailywalk.py @@ -0,0 +1,135 @@ +import logging +from datetime import date + +from django.views.decorators.csrf import csrf_exempt +from ninja import Router +from ninja.errors import HttpError + +from home.models import Contest, DailyWalk, Device +from home.views.apiv2.schemas.dailywalk import ( + DailyWalkInSchema, + DailyWalkOutSchema, + ErrorSchema, +) + +logger = logging.getLogger(__name__) +router = Router() + + +@router.get( + "/{account_id}", response={200: DailyWalkOutSchema, 404: ErrorSchema} +) +@csrf_exempt +def get_daily_walks(request, account_id: str): + try: + device = Device.objects.get(device_id=account_id) + except Device.DoesNotExist: + raise HttpError( + 404, + ( + f"Unregistered device - " + f"{account_id}. " + f"Please register first!" + ), + ) + + # Get walks from tied to this account + # NOTE: This is very hacky and cannot distinguish between legit and + # fake users. + # Someone can simply install the app on a new device and use a known + # email id and have the metrics simply aggregated. + # For the simple use case, this is likely not an issue and would need + # to be handled manually if needed + daily_walks = DailyWalk.objects.filter(account=device.account) + return 200, {"daily_walks": list(daily_walks)} + + +@router.post("", response={201: DailyWalkInSchema}) +@csrf_exempt +def create_daily_walk(request, payload: DailyWalkInSchema): + json_data = payload.dict() + try: + device = Device.objects.get(device_id=json_data["account_id"]) + except Device.DoesNotExist: + raise HttpError( + 404, + ( + f"Unregistered device - " + f"{json_data['account_id']}. " + f"Please register first!" + ), + ) + active_contests = set() + + # Json response template + json_response = { + "account_id": device.device_id, + "daily_walks": [], + } + + for daily_walk_data in json_data["daily_walks"]: + walk_date = daily_walk_data["date"] + contest = Contest.active( + for_date=date.fromisoformat(walk_date), strict=True + ) + if contest is not None: + active_contests.add(contest) + + # Check if there is already an entry for this date. 
If there is, + # update the entry. + # NOTE: By definition, there should be one and only one entry for + # a given email and date. + # NOTE: This is a potential vulnerability. Since there is no email + # authentication at the moment, anyone can simply spoof an email + # id with a new device and overwrite daily walk data for the + # target email. This is also a result of no session auth + # (can easily hit the api directly) + try: + # Updation + daily_walk = DailyWalk.objects.get( + account=device.account, date=walk_date + ) + daily_walk.steps = daily_walk_data["steps"] + daily_walk.distance = daily_walk_data["distance"] + daily_walk.device_id = json_data["account_id"] + daily_walk.save() + except DailyWalk.DoesNotExist: + # Create if object is missing + daily_walk = DailyWalk.objects.create( + date=walk_date, + steps=daily_walk_data["steps"], + distance=daily_walk_data["distance"], + device=device, + ) + + # Update the json response object + json_response["daily_walks"].append( + { + "date": daily_walk.date, + "steps": daily_walk.steps, + "distance": daily_walk.distance, + } + ) + + # Register contest for account if the day falls between contest dates + contest = Contest.active(for_date=date.today(), strict=True) + if contest: + active_contests.add(contest) + try: + acct = device.account + acct.contests.add(contest) + except Exception: + logger.error( + f"Could not associate contest " + f"{contest} with account {acct}!", + exc_info=True, + ) + else: + # No active contest + pass + + # Update Leaderboard + for contest in active_contests: + DailyWalk.update_leaderboard(device=device, contest=contest) + + return 201, json_response diff --git a/home/views/apiv2/device.py b/home/views/apiv2/device.py new file mode 100644 index 00000000..d2469fb5 --- /dev/null +++ b/home/views/apiv2/device.py @@ -0,0 +1,72 @@ +from django.views.decorators.csrf import csrf_exempt +from ninja import Router +from ninja.errors import HttpError + +from home.models import Device +from home.views.apiv2.schemas.device import DeviceInSchema, ErrorSchema + +router = Router() + + +def update_model(device: Device, json_data: dict): + # Data fields vary based on registration screen + for attr, value in json_data.items(): + if attr != "email": + setattr(device, attr, value) + device.save() + + +@router.put("/{device_id}", response={204: None, 404: ErrorSchema}) +@csrf_exempt +def update_device(request, device_id: str, payload: DeviceInSchema): + try: + device = Device.objects.get(device_id=device_id) + except Device.DoesNotExist: + raise HttpError( + 404, + f"""Unregistered device - + device_id: {device_id} + Please register first!""", + ) + update_model(device, payload.dict()) + + return 204, None + + +@router.patch("/{device_id}", response={204: None, 404: ErrorSchema}) +@csrf_exempt +def patch_device(request, device_id: str, payload: DeviceInSchema): + try: + device = Device.objects.get(device_id=device_id) + except Device.DoesNotExist: + raise HttpError( + 404, + ( + f"Unregistered device - " + f"{device_id}. " + f"Please register first!" + ), + ) + update_model(device, payload.dict(exclude_unset=True)) + + return 204, None + + +@router.delete("/{device_id}", response={204: None, 404: ErrorSchema}) +@csrf_exempt +def delete_device(request, device_id: str): + try: + device = Device.objects.get(device_id=device_id) + except Device.DoesNotExist: + raise HttpError( + 404, + ( + f"Unregistered device - " + f"{device_id}. " + f"Please register first!" 
+ ), + ) + device.account.delete() + device.delete() + + return 204, None diff --git a/home/views/apiv2/export.py b/home/views/apiv2/export.py new file mode 100644 index 00000000..86e4f19a --- /dev/null +++ b/home/views/apiv2/export.py @@ -0,0 +1,273 @@ +import csv +import logging +import os +import tempfile +from datetime import timedelta + +from django.db.models import BooleanField, Count, ExpressionWrapper, Q, Sum +from django.http import FileResponse, HttpResponse +from django.views import View +from django.views.decorators.csrf import csrf_exempt +from ninja import Router + +from home.models import Account, Contest, DailyWalk + +logger = logging.getLogger(__name__) +router = Router() + +# configure the base CSV headers +CSV_COLUMNS = [ + {"name": "Participant Name", "id": "name"}, + {"name": "Date Enrolled", "id": "created"}, + {"name": "Email", "id": "email"}, + {"name": "Zip Code", "id": "zip"}, + {"name": "Sexual Orientation", "id": "sexual_orien"}, + {"name": "Sexual Orientation Other", "id": "sexual_orien_other"}, + {"name": "Gender Identity", "id": "gender"}, + {"name": "Gender Identity Other", "id": "gender_other"}, + {"name": "Race", "id": "race"}, + {"name": "Race Other", "id": "race_other"}, + {"name": "Is Latino", "id": "is_latino"}, + {"name": "Age", "id": "age"}, + {"name": "Is New Signup", "id": "is_new"}, + {"name": "Active During Contest", "id": "is_active"}, + {"name": "Total Daily Walks During Contest", "id": "dw_contest_count"}, + { + "name": "Total Daily Walks During Baseline", + "id": "dw_baseline_count", + }, + {"name": "Total Steps During Contest", "id": "dw_contest_steps"}, + {"name": "Total Steps During Baseline", "id": "dw_baseline_steps"}, + { + "name": "Total Recorded Walks During Contest", + "id": "iw_contest_count", + }, + { + "name": "Total Recorded Walks During Baseline", + "id": "iw_baseline_count", + }, + { + "name": "Total Recorded Steps During Contest", + "id": "iw_contest_steps", + }, + { + "name": "Total Recorded Steps During Baseline", + "id": "iw_baseline_steps", + }, + { + "name": "Total Recorded Walk Time During Contest", + "id": "iw_contest_time", + }, + { + "name": "Total Recorded Walk Time During Baseline", + "id": "iw_baseline_time", + }, +] + + +def get_dailywalk_stats(name, ids, dailywalk_filter): + filters = Q(id__in=ids) + values = ["id"] + annotate = { + f"dw_{name}_count": Count("dailywalk", filter=dailywalk_filter), + f"dw_{name}_steps": Sum("dailywalk__steps", filter=dailywalk_filter), + f"dw_{name}_distance": Sum( + "dailywalk__distance", filter=dailywalk_filter + ), + } + order_by = ["id"] + return ( + Account.objects.filter(filters) + .values(*values) + .annotate(**annotate) + .order_by(*order_by) + ) + + +def get_intentionalwalk_stats(name, ids, intentionalwalk_filter): + filters = Q(id__in=ids) + values = ["id"] + annotate = { + f"iw_{name}_count": Count( + "intentionalwalk", filter=intentionalwalk_filter + ), + f"iw_{name}_steps": Sum( + "intentionalwalk__steps", filter=intentionalwalk_filter + ), + f"iw_{name}_distance": Sum( + "intentionalwalk__distance", filter=intentionalwalk_filter + ), + f"iw_{name}_time": Sum( + "intentionalwalk__walk_time", filter=intentionalwalk_filter + ), + } + order_by = ["id"] + return ( + Account.objects.filter(filters) + .values(*values) + .annotate(**annotate) + .order_by(*order_by) + ) + + +def get_daily_walks(ids, contest): + filters = Q( + account_id__in=ids, date__range=(contest.start_baseline, contest.end) + ) + values = ["account_id", "date", "steps"] + order_by = ["account_id", "date"] 
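+    # Illustrative shape of one row returned by this queryset (keys come
+    # from `values` above; the numbers are made up):
+    #   {"account_id": 7, "date": date(2024, 4, 1), "steps": 4200}
+    # Rows are ordered by account then date, which lets the export loop
+    # below walk them in lockstep with the per-account summary rows.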
+ return ( + DailyWalk.objects.filter(filters).values(*values).order_by(*order_by) + ) + + +def export_contest_users_data(file, contest_id, is_tester): + # get the Contest object + contest = Contest.objects.get(pk=contest_id) + + # configure the CSV writer + fieldnames = [col["id"] for col in CSV_COLUMNS] + header = {col["id"]: col["name"] for col in CSV_COLUMNS} + # add headers for every day in the output range (start of baseline to end of contest) + for dt in range((contest.end - contest.start_baseline).days + 1): + date = contest.start_baseline + timedelta(days=dt) + fieldnames.append(str(date)) + header[str(date)] = str(date) + writer = csv.DictWriter(file, fieldnames=fieldnames, extrasaction="ignore") + writer.writerow(header) + + # query for the base attributes + filters = Q(contests__contest_id=contest_id, is_tester=is_tester) + values = [ + "id", + "created", + "name", + "email", + "age", + "zip", + "gender", + "gender_other", + "sexual_orien", + "sexual_orien_other", + "race", + "race_other", + "is_latino", + ] + annotate = { + "is_new": ExpressionWrapper( + Q( + created__gte=contest.start_promo, + created__lt=contest.end + timedelta(days=1), + ), + output_field=BooleanField(), + ), + } + order_by = ["id"] + results = ( + Account.objects.filter(filters) + .values(*values) + .annotate(**annotate) + .order_by(*order_by) + ) + + # set up to process in batches + offset = 0 + limit = 25 + total = results.count() + while offset < total: + ids = [] + rows = [] + for row in results[offset : offset + limit]: # noqa E203 + # convert race Set into a comma delimited string + row["race"] = ",".join(row["race"]) + # gather all rows and ids + rows.append(row) + ids.append(row["id"]) + offset = offset + limit + + # add in the baseline period dailywalk stats + dw_baseline_results = get_dailywalk_stats( + "baseline", + ids, + Q( + dailywalk__date__range=( + contest.start_baseline, + contest.start - timedelta(days=1), + ) + ), + ) + for i, row in enumerate(dw_baseline_results): + rows[i].update(row) + + # add in the contest period dailywalk stats + dw_contest_results = get_dailywalk_stats( + "contest", + ids, + Q(dailywalk__date__range=(contest.start, contest.end)), + ) + for i, row in enumerate(dw_contest_results): + rows[i].update(row) + + # add in the baseline period intentionalwalk stats + iw_baseline_results = get_intentionalwalk_stats( + "baseline", + ids, + Q( + intentionalwalk__start__gte=contest.start_baseline, + intentionalwalk__start__lt=contest.start, + ), + ) + for i, row in enumerate(iw_baseline_results): + rows[i].update(row) + + # add in the contest period intentionalwalk stats + iw_contest_results = get_intentionalwalk_stats( + "contest", + ids, + Q( + intentionalwalk__start__gte=contest.start, + intentionalwalk__start__lt=contest.end + timedelta(days=1), + ), + ) + for i, row in enumerate(iw_contest_results): + rows[i].update(row) + # at this point, we have enough info to determine if user is "active" + rows[i]["is_active"] = ( + rows[i]["dw_contest_count"] > 0 + or rows[i]["iw_contest_count"] > 0 + ) + + # now add in every day of step data for each user + daily_walks = get_daily_walks(ids, contest) + rows_iter = iter(rows) + account = next(rows_iter) + for row in daily_walks: + while row["account_id"] != account["id"]: + account = next(rows_iter) + account[str(row["date"])] = row["steps"] + + # finally, write it out to the CSV...! 
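+        # Illustrative example of a single row dict at this point (made-up
+        # values): {"id": 12, "name": "Jane Doe", "is_new": True,
+        # "is_active": True, "dw_contest_count": 20, "dw_contest_steps": 52310,
+        # "2024-04-01": 7200, ...}. DictWriter was created with
+        # extrasaction="ignore", so keys that are not CSV columns (such as
+        # "id" and the distance aggregates) are simply dropped.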
+ writer.writerows(rows) + + +@router.get("/users") +@csrf_exempt +def export_users(request, contest_id: str, is_tester: str): + is_tester = is_tester == "true" + + if not contest_id: + return HttpResponse(status=422) + elif not request.user.is_authenticated: + return HttpResponse(status=401) + + try: + tmp_file = tempfile.NamedTemporaryFile(delete=False) + with open(tmp_file.name, "w") as file: + export_contest_users_data(file, contest_id, is_tester) + return FileResponse( + open(tmp_file.name, "rb"), + as_attachment=True, + filename="users_agg.csv", + ) + finally: + os.remove(tmp_file.name) diff --git a/home/views/apiv2/histogram/histogram.py b/home/views/apiv2/histogram/histogram.py new file mode 100644 index 00000000..dbc52a18 --- /dev/null +++ b/home/views/apiv2/histogram/histogram.py @@ -0,0 +1,422 @@ +import itertools +from datetime import date, datetime, time +from typing import List, Optional, TypedDict + +from django.db import models as djmodel +from django.db.models import ( + Case, + Count, + ExpressionWrapper, + F, + IntegerField, + Max, + Min, + Q, + QuerySet, + Value, + When, +) +from django.db.models.functions import Floor +from django.utils.timezone import make_aware +from ninja.errors import ValidationError + +from home.models import ( + Account, + Contest, + DailyWalk, + IntentionalWalk, + Leaderboard, +) + + +class ValidatedHistogramReq(TypedDict): + query_set: QuerySet + # bin_size is provided if the bins were + # generated with equal bin sizes, + # otherwise the bin_custom is provided. + bin_size: Optional[int] + bin_custom: List[int] + bin_count: int + unit: str + + +class Histogram: + # TODO: Implement a ResponseSerializer + # to handle serialization of the response + # (with filling of the bins) + # to either JSON or CSV format. + model_name: str = None + supported_models = { + "users": Account, + "dailywalk": DailyWalk, + "intentionalwalk": IntentionalWalk, + "leaderboard": Leaderboard, + } + + supported_fields = { + Leaderboard: ["steps"], + DailyWalk: ["steps", "distance"], + IntentionalWalk: ["steps", "distance"], + Account: ["age"], + } + + field_units = { + "steps": "steps", + "distance": "miles", + "age": "years", + } + + def __init__(self, model): + self.model: djmodel.Model = model + self.__unit: str = None + + @property + def unit(self): + return self.__unit + + def set_unit(self, unit): + self.__unit = unit + + def create_bin_query( + self, + model: djmodel.Model, + field: str, + is_tester: bool, + contest: Contest, + start_date: str, + end_date: str, + bin_count: int, + bin_size: int, + bin_custom: List[int], + ) -> ValidatedHistogramReq: + """Handle bin_count generates a histogram based on the bin_count.""" + date_filter: Q = self.get_date_filter( + model=model, + start_date=start_date, + end_date=end_date, + contest=contest, + is_tester=is_tester, + ) + + range = model.objects.filter(date_filter).aggregate( + max_value=Max(field), + min_value=Min(field), + ) + + upper, lower = range.get("max_value"), range.get("min_value") + if not upper or not lower: + # no data was found in the range. + return { + "query_set": model.objects.none(), + "bin_count": bin_count, + "bin_custom": bin_custom, + "bin_size": bin_size, + "unit": self.unit, + } + + if bin_count: + # Using an offset because dividing by N equal parts + # will result in N+1 bins. + # Ex: Data ranging from 0 to 11 and divide it into two bins. + # 11 // 2 = 5, results in 3 bins: 0-5, 5-10, 10-15 to capture the 11. 
+ offset = 1 + bin_size = (upper - 0) // (bin_count - offset) + + # Prevent division by zero when the + # range is less than the bin_count itself. + # We'll just drop everything into one bin. + bin_size = max(bin_size, 1) + elif bin_size: + # Using a lower bound is an optimization + # to reduce the search space for the query + # for performant queries on larger or sparse datasets. + bin_count = (upper - lower) // bin_size + + if bin_custom: + # For custom bins, we'll create a case statement. + cases = [ + When( + Q( + **{ + f"{field}__gte": bin_start, + f"{field}__lt": bin_end, + } + ), + then=Value(bin_start), + ) + for bin_start, bin_end in zip(bin_custom, bin_custom[1:]) + ] + + # SQL equivalent: + # SELECT + # CASE + # WHEN >= AND < THEN + # ... + # WHEN >= AND < THEN + # END AS bin_start, + # END AS bin_start, + # COUNT(*) AS count + # FROM + # JOIN ON .id = ._id # (if necessary) + # WHERE + # GROUP BY bin_start + # ORDER BY bin_start + last_bin = Value(bin_custom[-1]) + query_set = ( + model.objects.filter(date_filter) + .annotate( + bin_start=Case( + *cases, default=last_bin, output_field=IntegerField() + ), + ) + .values("bin_start") + .annotate(count=Count("*")) + .order_by("bin_start") + ) + + idx_lookup = { + bin_start: idx for idx, bin_start in enumerate(bin_custom) + } + + def get_bin_end(bin_start): + idx_of_end = idx_lookup[bin_start] + 1 + if idx_of_end >= len(bin_custom): + return upper + return bin_custom[idx_of_end] + + def get_bin_idx(bin_start): + return idx_lookup[bin_start] + + query_set = ( + { + "bin_start": bin["bin_start"], + "bin_end": get_bin_end(bin["bin_start"]), + "bin_idx": get_bin_idx(bin["bin_start"]), + "count": bin["count"], + } + for bin in query_set + ) + + return { + "query_set": query_set, + "bin_count": len(bin_custom), + "bin_custom": bin_custom, + "unit": self.unit, + } + + # For equal bins, we'll use the bin_size to generate the bins. + + # SQL equivalent: + # SELECT + # FLOOR( / ) AS bin_idx, + # FLOOR( / ) * AS bin_start, + # FLOOR( / ) * + AS bin_end, + # COUNT(*) AS count, + # FROM + # JOIN ON .id = ._id # (if necessary) + # WHERE + # GROUP BY bin + # ORDER BY bin + query_set = ( + model.objects.filter(date_filter) + .annotate( + bin_idx=ExpressionWrapper( + Floor(F(field) / bin_size), output_field=IntegerField() + ), + bin_start=ExpressionWrapper( + Floor(F(field) / bin_size) * bin_size, + output_field=IntegerField(), + ), + bin_end=ExpressionWrapper( + Floor(F(field) / bin_size) * bin_size + bin_size, + output_field=IntegerField(), + ), + ) + .values("bin_idx", "bin_start", "bin_end") + .annotate(count=Count("*")) + .order_by("bin_idx") + ) + + return { + "query_set": query_set, + "bin_size": bin_size, + "bin_count": bin_count, + "unit": self.unit, + } + + def get_date_filter( + self, + model: djmodel.Model, + contest: Optional[Contest], + start_date: Optional[date], + end_date: Optional[date], + is_tester: Optional[bool], + ) -> Q: + """Generates the correct filters for the given model. + + This is needed since the columns for dates are inconsistent across models, + and different depending on relationships. + """ + kwargs = {} + if model is Leaderboard: + if not contest: + raise ValidationError( + { + "contest_id": "contest_id is required for Leaderboard model." + } + ) + if start_date or end_date: + raise ValidationError( + { + "non_field_errors": "start_date and end_date is not supported for the Leaderboard model." 
+ } + ) + + kwargs = { + # This looks wrong, but yes - the `contest` model *actually* + # does stutter; + # It is home_contest.contest_id. + "contest__contest_id": contest.contest_id if contest else None, + "account__is_tester": is_tester, + } + elif model is Account: + # NOTE: + # For accounts, the "contest_id" and + # "start_date" and "end_date" will refer + # not to the account's creation date, + # but their participation in any walk during that contest period + # OR the date range. + + if contest: + # Attach time and timezone to the dates, + # because in the database, the dates are stored as + # date objects with no time and timezone. + # We'll use the beginning of the start_date day, + # and the end of the end_date day to capture all records + start_date = make_aware( + datetime.combine( + contest.start_baseline or contest.start, time.min + ) + ) + end_date = make_aware(datetime.combine(contest.end, time.max)) + + # SQL equivalent: + # SELECT * FROM account WHERE id IN ( + # SELECT account_id FROM intentional_walk + # WHERE start_date BETWEEN AND + # AND end_date BETWEEN AND + # UNION + # SELECT account_id FROM daily_walk + # WHERE date BETWEEN contest.start_date AND contest.end_date + # AND end_date BETWEEN contest.start_date AND contest.end_date + # ) + iw_kwargs = { + k: v + for k, v in { + "start__lte": end_date, + "start__gte": start_date, + "end__lte": end_date, + "end__gte": start_date, + }.items() + if v is not None + } + dw_kwargs = { + k: v + for k, v in { + "date__lte": end_date, + "date__gte": start_date, + }.items() + if v is not None + } + + kwargs = {"is_tester": is_tester} + if iw_kwargs or dw_kwargs: + account_walkers = ( + IntentionalWalk.objects.filter(**iw_kwargs) + .values("account_id") + .distinct() + .union( + DailyWalk.objects.filter(**dw_kwargs) + .values("account_id") + .distinct() + ) + ) + kwargs["id__in"] = account_walkers + elif model is DailyWalk: + kwargs = { + "date__lte": end_date, + "date__gte": start_date, + "account__is_tester": is_tester, + } + elif model is IntentionalWalk: + kwargs = { + "start__lte": end_date, + "start__gte": start_date, + "end__lte": end_date, + "end__gte": start_date, + "account__is_tester": is_tester, + } + else: + raise ValidationError( + {"non_field_errors": f"{model} is not yet supported."} + ) + # Remove None values from the kwargs, + # as they indicate optional fields that were not provided. + return Q(**{k: v for k, v in kwargs.items() if v is not None}) + + def fill_missing_bin_idx( + self, + query_set, + bin_size: int = None, + bin_custom: list = None, + total_bin_ct: int = 0, + ): + """Fill in missing bin intervals lazily. + + This is because the histogram is generated from a query set that may not have + found data in certain bins. + + For example, if the bins were [0, 18, 20, 33, 50, 70], + which creates bins from 0-17, 18-20, 20-33, 33-50, 50-70. + + There may be no users in in the 18-20 range, and no users in the 51 and 70. + In other words, missing on bin_idx = 1 and bin_idx = 4. + The query would not return any groupings for the [18, 20], or [50, 70] + This function will fill in those missing bins with a count of 0. + #""" + + def create_filler(cursor, bin_size, bin_custom): + res = {} + # bin_start and bin_end are inclusive. + if bin_custom: + res["bin_start"] = bin_custom[cursor] + if cursor + 1 < len(bin_custom): + res["bin_end"] = bin_custom[cursor + 1] + else: + res["bin_start"] = cursor * bin_size + res["bin_end"] = (cursor + 1) * bin_size + # Done down here to maintain stable order of keys. 
+ res["count"] = 0 + res["bin_idx"] = cursor + return res + + bin_idx_counter = itertools.count() + cursor = 0 + for bin in query_set: + cursor = next(bin_idx_counter) + curr_idx = bin["bin_idx"] + while curr_idx > cursor: + yield create_filler( + cursor=cursor, bin_size=bin_size, bin_custom=bin_custom + ) + cursor = next(bin_idx_counter) + yield bin + + cursor = next(bin_idx_counter) + # Fill in the rest of the bins with 0 count, + # until we reach the total expected count of bins. + while cursor and cursor < total_bin_ct: + yield create_filler( + cursor=cursor, bin_size=bin_size, bin_custom=bin_custom + ) + cursor = next(bin_idx_counter) diff --git a/home/views/apiv2/intentionalwalk.py b/home/views/apiv2/intentionalwalk.py new file mode 100644 index 00000000..1b43ca6c --- /dev/null +++ b/home/views/apiv2/intentionalwalk.py @@ -0,0 +1,96 @@ +import logging + +from django.views.decorators.csrf import csrf_exempt +from ninja import Router +from ninja.errors import HttpError + +from home.models import Device, IntentionalWalk +from home.views.apiv2.schemas.intentionalwalk import ( + ErrorSchema, + IntentionalWalkInSchema, + IntentionalWalkOutSchema, +) + +logger = logging.getLogger(__name__) +router = Router() + + +@router.get( + "/{account_id}", response={200: IntentionalWalkOutSchema, 404: ErrorSchema} +) +@csrf_exempt +def get_intentional_walks(request, account_id: str): + try: + device = Device.objects.get(device_id=account_id) + except Device.DoesNotExist: + raise HttpError( + 404, + ( + f"Unregistered device - " + f"{account_id}. " + f"Please register first!" + ), + ) + # Get walks from tied to this account + # NOTE: This is very hacky and cannot distinguish between legit and + # fake users. + # Someone can simply install the app on a new device and use a known + # email id and have the metrics simply aggregated. + # For the simple use case, this is likely not an issue and would need + # to be handled manually if needed + intentional_walks = IntentionalWalk.objects.filter(account=device.account) + + return 200, {"intentional_walks": list(intentional_walks)} + + +@router.post("", response={201: IntentionalWalkInSchema, 404: ErrorSchema}) +@csrf_exempt +def create_intentional_walk(request, payload: IntentionalWalkInSchema): + json_data = payload.dict() + + # Get the device if already registered + try: + device = Device.objects.get(device_id=json_data["account_id"]) + except Device.DoesNotExist: + raise HttpError( + 404, + ( + f"Unregistered device - " + f"{json_data['account_id']}. " + f"Please register first!" + ), + ) + json_response = { + "account_id": device.device_id, + "intentional_walks": [], + } + + # Loop through all the intentional walks + for intentional_walk_data in json_data["intentional_walks"]: + try: + intentional_walk = IntentionalWalk.objects.create( + event_id=intentional_walk_data["event_id"], + start=intentional_walk_data["start"], + end=intentional_walk_data["end"], + steps=intentional_walk_data["steps"], + distance=intentional_walk_data["distance"], + pause_time=intentional_walk_data["pause_time"], + device=device, + ) + + json_response["intentional_walks"].append( + { + "event_id": intentional_walk.event_id, + "start": intentional_walk.start, + "end": intentional_walk.end, + "steps": intentional_walk.steps, + "distance": intentional_walk.distance, + "pause_time": intentional_walk.pause_time, + } + ) + except Exception: + # IntentionalWalk records are immutable- so ignore any errors + # that might occur if the record already exists, etc... 
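+            # (e.g. a database integrity error if the same event_id is
+            # posted again; the duplicate walk is skipped rather than
+            # updated)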
+ pass + + return 201, payload diff --git a/home/views/apiv2/leaderboard.py b/home/views/apiv2/leaderboard.py new file mode 100644 index 00000000..9138f5d9 --- /dev/null +++ b/home/views/apiv2/leaderboard.py @@ -0,0 +1,79 @@ +from django.db.models import F +from django.db.models.expressions import Window +from django.db.models.functions import Rank +from django.views.decorators.csrf import csrf_exempt +from ninja import Router +from ninja.errors import HttpError + +from home.models import Contest, Device, Leaderboard +from home.views.apiv2.schemas.leaderboard import ErrorSchema, LeaderboardSchema + +router = Router() + + +@router.get("/get", response={200: LeaderboardSchema, 404: ErrorSchema}) +@csrf_exempt +def get_leaderboard(request, contest_id: str, device_id: str): + current_contest = Contest.objects.filter(contest_id=contest_id) + if not current_contest: + raise HttpError(404, "Contest not found") + + # http://localhost:8000/api/v2/leaderboard/ + # get?contest_id=&device_id= + + # Validate request. If any field is missing, + # send back the response message + # Get the device if already registered + try: + device = Device.objects.get(device_id=device_id) + except Device.DoesNotExist: + raise HttpError( + 404, ( + f"Unregistered device - " + f"{device_id}. " + f"Please register first!"), + ) + + # Json response template + json_response = {"leaderboard": []} + + leaderboard_list = [] + leaderboard_length = 10 + leaderboard = ( + Leaderboard.objects.filter( + contest_id=contest_id, + account__is_tester=device.account.is_tester, + ) + .values("account_id", "steps") + .annotate(rank=Window(expression=Rank(), order_by=F("steps").desc())) + ) + + # get top 10 + leaderboard_list = list(leaderboard[0:leaderboard_length]) + + # Check if user should be added after top 10 displayed + eleventh_place = True + for user in leaderboard_list: + if user["account_id"] == device.account.id: + user["device_id"] = device_id + eleventh_place = False + break + + # If user not in top 10, add as 11th in list + if eleventh_place: + leaderboard = Leaderboard.objects.filter( + contest_id=contest_id, account=device.account + ).values("account_id", "steps") + if len(leaderboard) > 0: + user = leaderboard[0] + user["device_id"] = device_id + user["rank"] = Leaderboard.objects.filter( + contest_id=contest_id, + steps__gte=user["steps"], + account__is_tester=device.account.is_tester, + ).count() + leaderboard_list.append(user) + + json_response["leaderboard"] = leaderboard_list + + return 200, json_response diff --git a/home/views/apiv2/schemas/account.py b/home/views/apiv2/schemas/account.py new file mode 100644 index 00000000..65807005 --- /dev/null +++ b/home/views/apiv2/schemas/account.py @@ -0,0 +1,241 @@ +from typing import List + +from ninja import Field, Schema +from pydantic import field_validator, model_validator +from typing_extensions import Self + +from home.models.account import ( + SAN_FRANCISCO_ZIP_CODES, + GenderLabels, + IsLatinoLabels, + RaceLabels, + SexualOrientationLabels, +) + + +class AccountSchema(Schema): + account_id: str = Field(description="Account id of the user's account") + name: str = Field(min_length=1, max_length=250, description="User's name") + email: str = Field(decription="Email which uniquely identifies an account") + zip: str = Field(min_length=5, max_length=25, decription="User's zip code") + age: int = Field(ge=1, le=200, description="User's age") + is_latino: IsLatinoLabels | None = Field( + default=None, + description="""Is the user of Latin descent? +

Choices: +
YE = Yes +
NO = No +
DA = Decline to answer""", + ) + race: List[RaceLabels] | None = Field( + default=None, + description="""User's race. +

Choices: +
NA = American Indian or Alaska Native +
BL = Black +
AS = Asian +
PI = Native Hawaiian or other Pacific Islander +
WH = White +
OT = Other +
DA = Decline to answer""", + ) + race_other: str | None = Field( + default=None, + max_length=75, + description="Free-form text field for 'race' value 'OT'", + ) + gender: GenderLabels | None = Field( + default=None, + description="""User's self-identified gender identity. +

Choices: +
CF = Female +
CM = Male +
TF = Trans Female +
TM = Trans Male +
NB = Non-binary +
OT = Other +
DA = Decline to answer + """, + ) + gender_other: str | None = Field( + default=None, + max_length=75, + description="Free-form text field for 'gender' value 'OT'", + ) + sexual_orien: SexualOrientationLabels | None = Field( + default=None, + description="""Self-identified sexual orientation of user. +

Choices: +
BS = Bisexual +
SG = SameGenderLoving +
US = Unsure +
HS = Heterosexual +
OT = Other +
DA = Decline to answer""", + ) + sexual_orien_other: str | None = Field( + default=None, + max_length=75, + description="Free-form text field for 'sexual_orien' value 'OT'", + ) + + class Config(Schema.Config): + extra = "forbid" + + # Check for valid zip code + @field_validator("zip") + def check_zip(cls, zipcode) -> str: + if zipcode not in SAN_FRANCISCO_ZIP_CODES: + raise ValueError("Invalid zip code") + return zipcode + + # Check for valid race + @model_validator(mode="after") + def validate_race(self) -> Self: + race_other = self.race_other + race = self.race + if race: + race = set(race) + diff = race - set(RaceLabels.__members__) + if diff: + raise ValueError(f"Invalid race selection '{diff}'") + self.race = list(race) + if "OT" in race and not race_other: + raise ValueError("Must specify 'other' race") + elif "OT" not in race and race_other: + raise ValueError( + "'race_other' should not be specified without race 'OT'" + ) + elif race_other: + raise ValueError( + "'race_other' should not be specified without 'race'" + ) + return self + + # Check for valid gender + @model_validator(mode="after") + def validate_gender(self) -> Self: + gender = self.gender + gender_other = self.gender_other + if gender is not None: + # if gender not in GenderLabels.__members__: + # raise ValueError(f"Invalid gender selection '{gender}'") + if gender == "OT": + if not gender_other: + raise ValueError("Must specify 'other' gender") + elif gender_other: + raise ValueError( + "'gender_other' should not be specified without 'OT'" + ) + elif gender_other: + raise ValueError( + "'gender_other' should not be specified without 'gender'" + ) + return self + + # Check for valid sex_orien + @model_validator(mode="after") + def validate_sex_orien(self) -> Self: + sexual_orien = self.sexual_orien + sexual_orien_other = self.sexual_orien_other + if sexual_orien is not None: + # if sexual_orien not in SexualOrientationLabels.__members__: + # raise ValueError( + # f"Invalid sexual orientation selection '{sexual_orien}'" + # ) + if sexual_orien == "OT": + if not sexual_orien_other: + raise ValueError("Must specify 'other' sexual orientation") + elif sexual_orien_other: + raise ValueError( + "'sexual_orien_other' should not be specified without 'OT'" + ) + elif sexual_orien_other: + raise ValueError( + "'sexual_orien_other' should not be specified without 'sexual_orien'" + ) + return self + + +class AccountPatchSchema(AccountSchema): + account_id: str = Field(description="Account id of the user's account") + name: str | None = Field( + default=None, min_length=1, max_length=250, description="User's name" + ) + email: str | None = Field( + default=None, decription="Email which uniquely identifies an account" + ) + zip: str | None = Field( + default=None, + min_length=5, + max_length=25, + decription="User's zip code", + ) + age: int | None = Field( + default=None, ge=1, le=200, description="User's age" + ) + is_latino: IsLatinoLabels | None = Field( + default=None, + description="""Is the user of Latin descent? +

Choices: +
YE = Yes +
NO = No +
DA = Decline to answer""", + ) + race: list[RaceLabels] | None = Field( + default=None, + description="""User's race. +

Choices: +
NA = American Indian or Alaska Native +
BL = Black +
AS = Asian +
PI = Native Hawaiian or other Pacific Islander +
WH = White +
OT = Other +
DA = Decline to answer""", + ) + race_other: str | None = Field( + default=None, + max_length=75, + description="Free-form text field for 'race' value 'OT'", + ) + gender: GenderLabels | None = Field( + default=None, + description="""User's self-identified gender identity. +

Choices: +
CF = Female +
CM = Male +
TF = Trans Female +
TM = Trans Male +
NB = Non-binary +
OT = Other +
DA = Decline to answer + """, + ) + gender_other: str | None = Field( + default=None, + max_length=75, + description="Free-form text field for 'gender' value 'OT'.", + ) + sexual_orien: SexualOrientationLabels | None = Field( + default=None, + description="""Self-identified sexual orientation of user. +

Choices: +
BS = Bisexual +
SG = SameGenderLoving +
US = Unsure +
HS = Heterosexual +
OT = Other +
DA = Decline to answer""", + ) + sexual_orien_other: str | None = Field( + default=None, + max_length=75, + description="Free-form text field for 'sexual_orien' value 'OT'.", + ) + + +class ErrorSchema(Schema): + message: str = Field( + description="Error message to display", + ) diff --git a/home/views/apiv2/schemas/admin.py b/home/views/apiv2/schemas/admin.py new file mode 100644 index 00000000..ab3a6c41 --- /dev/null +++ b/home/views/apiv2/schemas/admin.py @@ -0,0 +1,289 @@ +from datetime import date +from typing import Dict, List, Optional + +from django.db.models import QuerySet +from ninja import Field, FilterSchema, Schema +from ninja.errors import ValidationError +from pydantic import field_validator, model_validator +from typing_extensions import Self + + +class AdminMeSchema(Schema): + id: int = Field(description="Current logged in user's id") + username: str = Field(description="Current logged in user's username") + first_name: str = Field(description="Current logged in user's first name") + last_name: str = Field(description="Current logged in user's last name") + email: str = Field(description="Current logged in user's email") + + +class AdminHomeSchema(Schema): + accounts_count: int = Field(description="Total number of users") + accounts_steps: int = Field( + description="Total number of steps users walked" + ) + accounts_distance: float = Field( + description="Total distance users walked in miles" + ) + + +class HomeGraphFilter(Schema): + contest_id: str = Field( + default=None, + description="""The ID of the contest to filter by. +
This field is mutually exclusive with the date fields. +
For distance and step metrics, this will restrict the records to the values recorded during the contest period's start and end date. +
For account metrics, this will restrict the records to the accounts that participated in the contest.""", + ) + start_date: date = Field( + default=None, + description="""The start date to filter the records by. +

**Note** start_date and end_date are mutually exclusive with the contest_id field.""", + ) + end_date: date = Field( + default=None, + description="""The end date to filter the records by. +

**Note** start_date and end_date are mutually exclusive with the contest_id field.""", + ) + is_tester: bool = Field( + default=False, + description="If true, will only return records related to tester accounts.", + ) + + +class HomeGraphSchema(Schema): + date: str = Field(description="Date of data point") + count: int | str = Field(description="Count of data points") + + +class HomeGraphOutSchema(Schema): + results: list[HomeGraphSchema] = Field( + description="List of output values used for graphs" + ) + + +class ContestOutSchema(Schema): + contest_id: str = Field( + description="The ID of the contest.", + ) + start: date = Field(description="Start date of the contest") + end: date = Field(description="End date of the contest") + + +class UsersInSchema(Schema): + contest_id: str = Field( + default=None, + description="""The ID of the contest to filter by. +
This field is mutually exclusive with the date fields. +
For distance and step metrics, this will restrict the records to the values recorded during the contest period's start and end date. +
For account metrics, this will restrict the records to the accounts that participated in the contest.""", + ) + is_tester: bool = Field( + default=False, + description="If true, will only return records related to tester accounts.", + ) + order_by: str = None + page: int = None + query: str = None + + +class UsersOutSchema(Schema): + dw_count: int = Field(description="Total number of daily walk users") + dw_steps: int = Field( + description="Total number of steps users took on daily walks" + ) + dw_distance: float = Field( + description="Total distance in miles users took on daily walks" + ) + iw_count: int = Field( + default=None, + description="Total number of users that took intentional walks", + ) + iw_steps: int = Field( + default=None, + description="Total number of steps users took on intentional walks", + ) + iw_distance: float = Field( + default=None, + description="Total distance in miles users took on daily walks", + ) + iw_time: int = Field( + default=None, + description="Total amount of time users went on intentional walks", + ) + is_new: bool = Field( + default=None, + ) + is_active: bool = None + + +class UsersByZipInSchema(Schema): + contest_id: str = Field( + default=None, description="The ID of the contest to filter by." + ) + is_tester: bool = Field( + default=False, + description="If true, will only return records related to tester accounts.", + ) + + +class UsersByZipOutSchema(Schema): + total: dict[str, int] = Field( + description="Total number of users in each zip code" + ) + new: dict[str, int] = Field( + default=None, + description="Total number of users in each zip code from contest promo start to contest end date", + ) + + +class UsersByZipActiveInSchema(Schema): + contest_id: str = Field( + default=None, description="The ID of the contest to filter by." + ) + is_tester: bool = Field( + default=False, + description="If true, will only return records related to tester accounts.", + ) + + +class HistogramInSchema(Schema): + field: str = Field(description="The field to group by") + contest_id: str = Field( + default=None, + description="""The ID of the contest to filter by. +
This field is mutually exclusive with the date fields. +
For distance and step metrics, this will restrict the records to the values recorded during the contest period's start and end date. +
For account metrics, this will restrict the records to the accounts that participated in the contest.""", + ) + is_tester: bool = Field( + default=False, + description="If true, will only return records related to tester accounts.", + ) + bin_size: int = Field( + default=None, + description="""The size of the bin to group the data by. Units will be the same as the field. +

**Note** this is mutually exclusive with the bin_count and bin_custom fields.""", + ) + bin_count: int = Field( + default=None, + description="""The number of bins to group the data by. +

**Note** this is mutually exclusive with the bin_size and bin_custom fields.""", + ) + bin_custom: str = Field( + default=None, + description="""A list of comma-separated custom bin sizes in increasing order to group the data by. +

Example: 0,18,29,44,59 +

**Note** this is mutually exclusive with the bin_size and bin_count fields.""", + ) + + # Date fields to filter by, inclusive. + # These fields are mutually exclusive with the contest_id field. + start_date: date = Field( + default=None, + description="""The start date to filter the records by. +

**Note** start_date and end_date are mutually exclusive with the contest_id field.""", + ) + end_date: date = Field( + default=None, + description="""The end date to filter the records by. +

**Note** start_date and end_date are mutually exclusive with the contest_id field.""", + ) + + @field_validator("bin_custom") + def str_to_list(cls, bin_custom): + try: + bin_custom: List[int] = [ + int(v) for v in bin_custom.split(",") if v + ] + except ValueError: + raise ValidationError( + {"bin_custom": f"bin_custom could not be parsed: {bin_custom}"} + ) + return bin_custom + + @model_validator(mode="after") + def validate_bin_param(self) -> Self: + bin_size = self.bin_size + bin_count = self.bin_count + bin_custom = self.bin_custom + if sum((bool(x) for x in (bin_size, bin_count, bin_custom))) > 1: + raise ValidationError( + { + "non_field_errors": "bin_size, bin_count and bin_custom are mutually exclusive." + } + ) + if not bin_size and not bin_count and not bin_custom: + raise ValidationError( + { + "non_field_errors": "bin_size, bin_count, or bin_custom is required." + } + ) + if bin_size and bin_size <= 0: + raise ValidationError( + {"bin_size": "bin_size must be greater than 0."} + ) + + if bin_count and bin_count < 2: + raise ValidationError( + {"bin_count": "bin_count must be greater than 1."} + ) + + if bin_custom: + increasing = all(a < b for a, b in zip(bin_custom, bin_custom[1:])) + if not increasing: + raise ValidationError( + { + "bin_custom": "bin_custom values must be in increasing order." + } + ) + if not all([x >= 0 for x in bin_custom]): + raise ValidationError( + {"bin_custom": "bin_custom values must be positive."} + ) + + return self + + +class Bins(Schema): + bin_idx: int = Field( + description="The start of the bin", + ) + bin_start: int = Field( + description="The end of the bin", + ) + bin_end: int = Field( + description="The end of the bin", + ) + count: int = Field( + description="The count in the bin", + ) + + +class HistogramOutSchema(Schema): + data: List[Bins] + bin_size: int = Field( + default=None, + description="""The size of the bin to group the data by. Units will be the same as the field. +

**Note** this is mutually exclusive with the bin_count and bin_custom fields.""", + ) + bin_count: int = Field( + default=None, + description="""The number of bins to group the data by. +

**Note** this is mutually exclusive with the bin_size and bin_custom fields.""", + ) + bin_custom: List[int] | None = Field( + default=None, + description="""A list of comma-separated custom bin sizes in increasing order to group the data by. +

Example: 0,18,29,44,59 +

Note this is mutually exclusive with the bin_size and bin_count fields.""", + ) + unit: str = Field(description="The unit of measurement for the data") + + +class ErrorSchema(Schema): + message: str = Field( + description="Error message to display", + ) diff --git a/home/views/apiv2/schemas/contest.py b/home/views/apiv2/schemas/contest.py new file mode 100644 index 00000000..58230a34 --- /dev/null +++ b/home/views/apiv2/schemas/contest.py @@ -0,0 +1,27 @@ +from datetime import date +from typing import List, Optional + +from ninja import Field, ModelSchema, Schema +from pydantic import ValidationInfo, field_validator, model_validator +from typing_extensions import Self + +from home.models.contest import Contest + + +class ContestSchema(Schema): + contest_id: str = Field(description="Contest identifier") + start_baseline: date | None = Field( + description="Start of baseline period (prior to contest start)" + ) + start_promo: date = Field(description="Start date of promotion") + start: date = Field(description="Contest start date") + end: date = Field(decription="Contest end date") + + class Config(Schema.Config): + extra = "forbid" + + +class ErrorSchema(Schema): + message: str = Field( + description="Error message to display", + ) diff --git a/home/views/apiv2/schemas/dailywalk.py b/home/views/apiv2/schemas/dailywalk.py new file mode 100644 index 00000000..c6da9e32 --- /dev/null +++ b/home/views/apiv2/schemas/dailywalk.py @@ -0,0 +1,58 @@ +from datetime import date as date_type + +from ninja import Field, Schema +from pydantic import computed_field + + +class DailyWalkIn(Schema): + date: str = Field( + description="The specific date for which the steps are recorded" + ) + steps: int = Field(ge=0, description="Number of steps recorded") + distance: float = Field(ge=0, description="Total distance covered") + + class Config(Schema.Config): + extra = "forbid" + + +class DailyWalkInSchema(Schema): + account_id: str = Field( + description="Unique identifier for user's account." + ) + daily_walks: list[DailyWalkIn] = Field( + description="List of record daily walks: date, steps, distance." + ) + + class Config(Schema.Config): + extra = "forbid" + + +class DailyWalkOut(Schema): + date: date_type = Field( + description="The specific date for which the steps are recorded" + ) + steps: int = Field(ge=0, description="Number of steps recorded") + distance: float = Field(ge=0, description="Total distance covered") + + +class DailyWalkOutSchema(Schema): + daily_walks: list[DailyWalkOut] = Field( + default=None, + description="List of record daily walks: date, steps, distance.", + ) + + @computed_field + @property + def total_steps(self) -> int: + return sum(daily_walk.steps for daily_walk in self.daily_walks) + + @computed_field + @property + def total_distance(self) -> float: + return sum(daily_walk.distance for daily_walk in self.daily_walks) + + +class ErrorSchema(Schema): + message: str = Field( + description="Error message to display", + ) diff --git a/home/views/apiv2/schemas/device.py b/home/views/apiv2/schemas/device.py new file mode 100644 index 00000000..8a9f6526 --- /dev/null +++ b/home/views/apiv2/schemas/device.py @@ -0,0 +1,117 @@ +from datetime import datetime + +from ninja import Field, Schema +from typing_extensions import Self + + +class DeviceInSchema(Schema): + device_id: str = Field( + max_length=250, + description="""A unique id generated by the app when it is first installed" + on a device. 
Used for authentication on subsequent calls.""", + ) + device_model: str | None = Field( + default=None, + max_length=25, + description="""Unique identifier for the device's model. +
iOS: "iPhone7,2" +
Android: "goldfish" +
Windows: "Y3R94UC#AC4" + """, + ) + manufacturer: str | None = Field( + default=None, + max_length=25, + description="""Manufacturer of the device. +

getManufacturer() - Gets the device manufacturer +
iOS: "Apple" +
Android: "Google" +
Windows: ? + """, + ) + os_name: str | None = Field( + default=None, + max_length=25, + description="""Operating system of the device. +

getSystemName() - Gets the device OS name. +
iOS: "iOS" on newer iOS devices "iPhone OS" on older devices + (including older iPad models), "iPadOS" for iPads using iPadOS 15.0 or higher. +
Android: "Android" +
Windows: ? + """, + ) + os_version: str | None = Field( + default=None, + max_length=25, + description="""Device operating system version. +

getSystemVersion() - Gets the device OS version. +
iOS: "11.0" +
Android: "7.1.1" +
Windows: ? + """, + ) + + class Config(Schema.Config): + extra = "forbid" + + +class DeviceOutSchema(Schema): + device_id: str = Field( + max_length=250, + description="""A unique id generated by the app when it is first installed on a device. Used for authentication on subsequent calls.""", + ) + created: datetime = Field( + description="When the record was created/device was registered" + ) + device_model: str | None = Field( + default=None, + max_length=25, + description="""Unique identifier for the device's model. +

getDeviceId() - Gets the device ID. +
iOS: "iPhone7,2" +
Android: "goldfish" +
Windows: "Y3R94UC#AC4" + """, + ) + manufacturer: str | None = Field( + default=None, + max_length=25, + description="""Manufacturer of the device. +

getManufacturer() - Gets the device manufacturer +
iOS: "Apple" +
Android: "Google" +
Windows: ? + """, + ) + os_name: str | None = Field( + default=None, + max_length=25, + description="""Operating system of the device. +

getSystemName() - Gets the device OS name. +
iOS: "iOS" on newer iOS devices "iPhone OS" on older devices + (including older iPad models), "iPadOS" for iPads using iPadOS 15.0 or higher. +
Android: "Android" +
Windows: ? + """, + ) + os_version: str | None = Field( + default=None, + max_length=25, + description="""Device operating system version. +

getSystemVersion() - Gets the device OS version. +
iOS: "11.0" +
Android: "7.1.1" +
Windows: ? + """, + ) + + class Config(Schema.Config): + extra = "forbid" + + +class ErrorSchema(Schema): + message: str = Field( + description="Error message to display", + ) diff --git a/home/views/apiv2/schemas/intentionalwalk.py b/home/views/apiv2/schemas/intentionalwalk.py new file mode 100644 index 00000000..0002819c --- /dev/null +++ b/home/views/apiv2/schemas/intentionalwalk.py @@ -0,0 +1,100 @@ +from datetime import datetime +from typing import List, Optional + +from ninja import Field, Schema +from pydantic import computed_field + + +class IntentionalWalkInBaseSchema(Schema): + event_id: str = Field( + max_length=250, + description="v4 random uuid generated on the client.", + ) + start: datetime = Field( + description="Timestamp when the intentional walk started." + ) + end: datetime = Field( + descripition="Timestamp when the intentional walk ended." + ) + steps: int = Field(ge=0, description="Number of steps recorded.") + pause_time: float = Field( + ge=0, description="Total time paused (in seconds)." + ) + distance: float = Field(ge=0, description="Total distance covered.") + + class Config(Schema.Config): + extra = "forbid" + + +class IntentionalWalkInSchema(Schema): + account_id: str = Field( + max_length=250, + description="Account id of the account the data is linked to", + ) + intentional_walks: list[IntentionalWalkInBaseSchema] = Field( + description="List of recorded intentional walks." + ) + + +class IntentionalWalkOutBaseSchema(Schema): + event_id: str = Field( + max_length=250, description="v4 random uuid generated on the client." + ) + start: datetime = Field( + description="Timestamp when the intentional walk started." + ) + end: datetime = Field( + descripition="Timestamp when the intentional walk ended." + ) + steps: int = Field(ge=0, description="Number of steps recorded.") + pause_time: float = Field( + ge=0, description="Total time paused (in seconds)." + ) + walk_time: float = Field( + ge=0, description="Total time walked not including pause time." + ) + distance: float = Field(ge=0, description="Total distance covered.") + + +class IntentionalWalkOutSchema(Schema): + intentional_walks: list[IntentionalWalkOutBaseSchema] = Field( + description="List of recorded intentional walks." + ) + + @computed_field + @property + def total_steps(self) -> int: + return sum( + intentional_walk.steps + for intentional_walk in self.intentional_walks + ) + + @computed_field + @property + def total_walk_time(self) -> float: + return sum( + intentional_walk.walk_time + for intentional_walk in self.intentional_walks + ) + + @computed_field + @property + def total_pause_time(self) -> float: + return sum( + intentional_walk.pause_time + for intentional_walk in self.intentional_walks + ) + + @computed_field + @property + def total_distance(self) -> float: + return sum( + intentional_walk.distance + for intentional_walk in self.intentional_walks + ) + + +class ErrorSchema(Schema): + message: str = Field( + description="Error message to display", + ) diff --git a/home/views/apiv2/schemas/leaderboard.py b/home/views/apiv2/schemas/leaderboard.py new file mode 100644 index 00000000..85c9420c --- /dev/null +++ b/home/views/apiv2/schemas/leaderboard.py @@ -0,0 +1,31 @@ +from typing import Optional + +from ninja import Field, Schema +from typing_extensions import Self + + +class LeaderboardUserSchema(Schema): + account_id: int = Field( + desription="Account id of the account the data is linked to." 
+ ) + steps: int = Field(description="Number of steps recorded.") + rank: int = Field(description="The rank of the user for most walks.") + device_id: str = Field( + default=None, + description="Device id of the device the data is coming from.", + ) + + class Config(Schema.Config): + extra = "forbid" + + +class LeaderboardSchema(Schema): + leaderboard: list[LeaderboardUserSchema] = Field( + description="Ranked list of users on the leaderboard." + ) + + +class ErrorSchema(Schema): + message: str = Field( + description="Error message to display", + ) diff --git a/home/views/apiv2/schemas/weeklygoal.py b/home/views/apiv2/schemas/weeklygoal.py new file mode 100644 index 00000000..9da0d659 --- /dev/null +++ b/home/views/apiv2/schemas/weeklygoal.py @@ -0,0 +1,83 @@ +from datetime import date +from typing import Optional + +from ninja import Field, Schema +from pydantic import field_validator, model_validator +from typing_extensions import Self + + +class WeeklyGoalIn(Schema): + start_of_week: str = Field( + description="The start of the week for the goal." + ) + steps: int = Field(ge=0, description="Step goal for the week.") + days: int = Field( + ge=0, description="Number of days per week to reach goal." + ) + + class Config(Schema.Config): + extra = "forbid" + + +class WeeklyGoalInSchema(Schema): + account_id: str = Field( + description="Account id of the account the data is linked to." + ) + weekly_goal: WeeklyGoalIn = Field( + description="Start week, steps, and days of the weekly goal." + ) + + class Config(Schema.Config): + extra = "forbid" + + +class WeeklyGoalOut(Schema): + start_of_week: date = Field( + description="The start of the week for the goal." + ) + steps: int = Field(ge=0, description="Step goal for the week.") + days: int = Field( + ge=0, description="Number of days per week to reach goal." + ) + + +class WeeklyGoalOutSchema(Schema): + account_id: int = Field( + description="Account id of the account the data is linked to." + ) + weekly_goal: WeeklyGoalOut = Field( + description="Start week, steps, and days of the weekly goal." + ) + + +class WeeklyGoalListInSchema(Schema): + account_id: str = Field( + description="Account id of the account the data is linked to." + ) + + class Config(Schema.Config): + extra = "forbid" + + +class WeeklyGoalOutList(Schema): + id: int = Field(description="Unique id for the set weekly goal.") + start_of_week: str = Field( + description="The start of the week for the goal." + ) + steps: int = Field(description="Step goal for the week.") + days: int = Field(description="Number of days per week to reach goal.") + account_id: int = Field( + description="Account id of the account the data is linked to." + ) + + +class WeeklyGoalListOutSchema(Schema): + weekly_goals: list[WeeklyGoalOutList] = Field( + description="List of user's weekly goals." 
+ ) + + +class ErrorSchema(Schema): + message: str = Field( + description="Error message to display", + ) diff --git a/home/views/apiv2/weeklygoal.py b/home/views/apiv2/weeklygoal.py new file mode 100644 index 00000000..d754ed05 --- /dev/null +++ b/home/views/apiv2/weeklygoal.py @@ -0,0 +1,105 @@ +from datetime import datetime + +from django.views.decorators.csrf import csrf_exempt +from ninja import Router +from ninja.errors import HttpError + +from home.models import Device, WeeklyGoal +from home.utils.dates import DATE_FORMAT, get_start_of_week +from home.views.apiv2.schemas.weeklygoal import ( + ErrorSchema, + WeeklyGoalInSchema, + WeeklyGoalListInSchema, + WeeklyGoalListOutSchema, + WeeklyGoalOutSchema, +) + +router = Router() + + +@router.post("", response={201: WeeklyGoalOutSchema, 404: ErrorSchema}) +@csrf_exempt +def create_weekly_goal(request, payload: WeeklyGoalInSchema): + """Create or update a weeklygoal for an account""" + json_data = payload.dict() + # Get the device + try: + device = Device.objects.get(device_id=json_data["account_id"]) + account = device.account + except Device.DoesNotExist: + raise HttpError( + 404, + ( + f"Unregistered device - " + f"{json_data['account_id']}. " + f"Please register first!" + ), + ) + + # Json response template + json_response = {"account_id": account.id, "weekly_goal": {}} + + weekly_goal_update = json_data["weekly_goal"] + + start_of_week = weekly_goal_update["start_of_week"] + start_of_week_update = get_start_of_week( + datetime.strptime(start_of_week, DATE_FORMAT).date() + ) + steps_update = weekly_goal_update["steps"] + days_update = weekly_goal_update["days"] + + # Check if there's already a goal for the week. If there is, + # update the entry. + try: + weekly_goal = WeeklyGoal.objects.get( + account=account, + start_of_week=start_of_week_update, + ) + weekly_goal.steps = steps_update + weekly_goal.days = days_update + weekly_goal.save() + except WeeklyGoal.DoesNotExist: + # Creation if object is missing + weekly_goal = WeeklyGoal.objects.create( + start_of_week=start_of_week_update, + steps=steps_update, + days=days_update, + account=account, + ) + + # Update the json object + json_response["weekly_goal"] = { + "start_of_week": weekly_goal.start_of_week, + "steps": weekly_goal.steps, + "days": weekly_goal.days, + } + return 201, json_response + + +@router.get("", response={200: WeeklyGoalListOutSchema, 404: ErrorSchema}) +@csrf_exempt +def get_weekly_goals(request, payload: WeeklyGoalListInSchema): + """Get List of Weekly Goals""" + json_data = payload.dict() + # Get the device + try: + device = Device.objects.get(device_id=json_data["account_id"]) + account = device.account + except Device.DoesNotExist: + raise HttpError( + 404, + ( + f"Unregistered device - " + f"{json_data['account_id']}. " + f"Please register first!" + ), + ) + + # Get weekly goals tied to this account + weekly_goals = list(WeeklyGoal.objects.filter(account=account).values()) + """ for goal in weekly_goals: + goal = model_to_dict(goal) """ + for goal in weekly_goals: + goal["start_of_week"] = goal["start_of_week"].strftime(DATE_FORMAT) + + return 200, {"weekly_goals": weekly_goals} diff --git a/poetry.lock b/poetry.lock index e9c71c40..2338e0f6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,18 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
+ +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} [[package]] name = "asgiref" @@ -289,22 +303,24 @@ argon2 = ["argon2-cffi (>=19.1.0)"] bcrypt = ["bcrypt"] [[package]] -name = "django-postgres-setfield" -version = "0.0.1" -description = "Django field for storing sets, backed by Postgres arrays" +name = "django-ninja" +version = "1.2.0" +description = "Django Ninja - Fast Django REST framework" optional = false -python-versions = "*" -files = [] -develop = false +python-versions = ">=3.7" +files = [ + {file = "django_ninja-1.2.0-py3-none-any.whl", hash = "sha256:aa8ca853bf189cc13247b1922ca41f1a6e60056c90041ec0081cce0ea8d8de02"}, + {file = "django_ninja-1.2.0.tar.gz", hash = "sha256:0a7b7e20202f50546ae45af6a262fccb606c9e75b8bbe9fe38d49075a9435bf0"}, +] [package.dependencies] -Django = ">=2.1" +Django = ">=3.1" +pydantic = ">=2.0,<3.0.0" -[package.source] -type = "git" -url = "https://github.com/benperlman/django-postgres-setfield.git" -reference = "HEAD" -resolved_reference = "5969124d3da75a5ce1e5b84806f2fd2f8b7b532e" +[package.extras] +dev = ["pre-commit"] +doc = ["markdown-include", "mkdocs", "mkdocs-material", "mkdocstrings"] +test = ["django-stubs", "mypy (==1.7.1)", "psycopg2-binary", "pytest", "pytest-asyncio", "pytest-cov", "pytest-django", "ruff (==0.4.2)"] [[package]] name = "djangorestframework" @@ -550,6 +566,116 @@ files = [ {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, ] +[[package]] +name = "pydantic" +version = "2.7.4" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.7.4-py3-none-any.whl", hash = "sha256:ee8538d41ccb9c0a9ad3e0e5f07bf15ed8015b481ced539a1759d8cc89ae90d0"}, + {file = "pydantic-2.7.4.tar.gz", hash = "sha256:0c84efd9548d545f63ac0060c1e4d39bb9b14db8b3c0652338aecc07b5adec52"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.18.4" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.18.4" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"}, + {file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb"}, + {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c"}, + {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e"}, + {file = "pydantic_core-2.18.4-cp310-none-win32.whl", hash = "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc"}, + {file = "pydantic_core-2.18.4-cp310-none-win_amd64.whl", hash = "sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0"}, + {file = "pydantic_core-2.18.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d"}, + {file = "pydantic_core-2.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951"}, + {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2"}, + {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9"}, + {file = "pydantic_core-2.18.4-cp311-none-win32.whl", hash = "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558"}, + {file = "pydantic_core-2.18.4-cp311-none-win_amd64.whl", hash = "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b"}, + {file = "pydantic_core-2.18.4-cp311-none-win_arm64.whl", hash = "sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805"}, + {file = "pydantic_core-2.18.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6f5c4d41b2771c730ea1c34e458e781b18cc668d194958e0112455fff4e402b2"}, + {file = "pydantic_core-2.18.4-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:2fdf2156aa3d017fddf8aea5adfba9f777db1d6022d392b682d2a8329e087cef"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4748321b5078216070b151d5271ef3e7cc905ab170bbfd27d5c83ee3ec436695"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:847a35c4d58721c5dc3dba599878ebbdfd96784f3fb8bb2c356e123bdcd73f34"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c40d4eaad41f78e3bbda31b89edc46a3f3dc6e171bf0ecf097ff7a0ffff7cb1"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:21a5e440dbe315ab9825fcd459b8814bb92b27c974cbc23c3e8baa2b76890077"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01dd777215e2aa86dfd664daed5957704b769e726626393438f9c87690ce78c3"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4b06beb3b3f1479d32befd1f3079cc47b34fa2da62457cdf6c963393340b56e9"}, + {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:564d7922e4b13a16b98772441879fcdcbe82ff50daa622d681dd682175ea918c"}, + {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0eb2a4f660fcd8e2b1c90ad566db2b98d7f3f4717c64fe0a83e0adb39766d5b8"}, + {file = "pydantic_core-2.18.4-cp312-none-win32.whl", hash = "sha256:8b8bab4c97248095ae0c4455b5a1cd1cdd96e4e4769306ab19dda135ea4cdb07"}, + {file = "pydantic_core-2.18.4-cp312-none-win_amd64.whl", hash = "sha256:14601cdb733d741b8958224030e2bfe21a4a881fb3dd6fbb21f071cabd48fa0a"}, + {file = "pydantic_core-2.18.4-cp312-none-win_arm64.whl", hash = "sha256:c1322d7dd74713dcc157a2b7898a564ab091ca6c58302d5c7b4c07296e3fd00f"}, + {file = "pydantic_core-2.18.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:823be1deb01793da05ecb0484d6c9e20baebb39bd42b5d72636ae9cf8350dbd2"}, + {file = "pydantic_core-2.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebef0dd9bf9b812bf75bda96743f2a6c5734a02092ae7f721c048d156d5fabae"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1d6df168efb88d7d522664693607b80b4080be6750c913eefb77e34c12c71a"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9899c94762343f2cc2fc64c13e7cae4c3cc65cdfc87dd810a31654c9b7358cc"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99457f184ad90235cfe8461c4d70ab7dd2680e28821c29eca00252ba90308c78"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18f469a3d2a2fdafe99296a87e8a4c37748b5080a26b806a707f25a902c040a8"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cdf28938ac6b8b49ae5e92f2735056a7ba99c9b110a474473fd71185c1af5d"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:938cb21650855054dc54dfd9120a851c974f95450f00683399006aa6e8abb057"}, + {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:44cd83ab6a51da80fb5adbd9560e26018e2ac7826f9626bc06ca3dc074cd198b"}, + {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:972658f4a72d02b8abfa2581d92d59f59897d2e9f7e708fdabe922f9087773af"}, + {file = "pydantic_core-2.18.4-cp38-none-win32.whl", 
hash = "sha256:1d886dc848e60cb7666f771e406acae54ab279b9f1e4143babc9c2258213daa2"}, + {file = "pydantic_core-2.18.4-cp38-none-win_amd64.whl", hash = "sha256:bb4462bd43c2460774914b8525f79b00f8f407c945d50881568f294c1d9b4443"}, + {file = "pydantic_core-2.18.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528"}, + {file = "pydantic_core-2.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23"}, + {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b"}, + {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a"}, + {file = "pydantic_core-2.18.4-cp39-none-win32.whl", hash = "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d"}, + {file = "pydantic_core-2.18.4-cp39-none-win_amd64.whl", hash = "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", 
hash = "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9"}, + {file = "pydantic_core-2.18.4.tar.gz", hash = "sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + [[package]] name = "pyflakes" version = "2.5.0" @@ -761,13 +887,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.3.0" -description = "Backported and Experimental Type Hints for Python 3.7+" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, - {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] @@ -824,4 +950,4 @@ brotli = ["Brotli"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "5c86a2e78c42307fdf84a73f8b7b3f0ce6097ec5d4a49457a584d42c00f170b8" +content-hash = "f157465d10b1babb348ce641fc0993096bae49660394bed5450118ff1a3fb553" diff --git a/pyproject.toml b/pyproject.toml index 1b28be54..ae1c9376 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,38 +1,38 @@ -[tool.poetry] -name = "intentional-walk" -version = "0.1.0" -description = "" -authors = ["Your Name "] - -[tool.poetry.dependencies] -python = "^3.8" -django = ">=4.2.0,<5.0" -psycopg2 = "^2.9.1" -dj-database-url = "^0.5.0" -python-dotenv = "^0.11.0" -pytest-django = "^3.10.0" -coveralls = "^1.11.1" -coverage = "^5.0.3" -gunicorn = "^20.0.4" -python-dateutil = "^2.8.1" -whitenoise = "^5.0.1" -uuid = "^1.30" -faker = "^8.11.0" -django-postgres-setfield = {git = "https://github.com/benperlman/django-postgres-setfield.git"} -black = "^22.8.0" -flake8 = "^5.0.4" -autoflake = "^1.5.3" -djangorestframework = "^3.14.0" - -[tool.poetry.dev-dependencies] -freezegun = "^1.1.0" -pytest = "^7.1.3" -libfaketime = "^2.0.0" -pytest-libfaketime = "^0.1.2" - -[tool.black] -line-length 
= 79 - -[build-system] -requires = ["poetry>=0.12"] -build-backend = "poetry.masonry.api" +[tool.poetry] +name = "intentional-walk" +version = "0.1.0" +description = "" +authors = ["Your Name "] + +[tool.poetry.dependencies] +python = "^3.8" +django = ">=4.2.0,<5.0" +psycopg2 = "^2.9.1" +dj-database-url = "^0.5.0" +python-dotenv = "^0.11.0" +pytest-django = "^3.10.0" +coveralls = "^1.11.1" +coverage = "^5.0.3" +gunicorn = "^20.0.4" +python-dateutil = "^2.8.1" +whitenoise = "^5.0.1" +uuid = "^1.30" +faker = "^8.11.0" +black = "^22.8.0" +flake8 = "^5.0.4" +autoflake = "^1.5.3" +djangorestframework = "^3.14.0" +django-ninja = "^1.1.0" + +[tool.poetry.dev-dependencies] +freezegun = "^1.1.0" +pytest = "^7.1.3" +libfaketime = "^2.0.0" +pytest-libfaketime = "^0.1.2" + +[tool.black] +line-length = 79 + +[build-system] +requires = ["poetry>=0.12"] +build-backend = "poetry.masonry.api" diff --git a/requirements.txt b/requirements.txt index 2139ea02..2484f365 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,4 @@ +annotated-types==0.7.0 ; python_version >= "3.8" and python_version < "4.0" asgiref==3.7.2 ; python_version >= "3.8" and python_version < "4.0" attrs==21.2.0 ; python_version >= "3.8" and python_version < "4.0" autoflake==1.5.3 ; python_version >= "3.8" and python_version < "4.0" @@ -10,7 +11,7 @@ colorama==0.4.4 ; python_version >= "3.8" and python_version < "4.0" and (sys_pl coverage==5.5 ; python_version >= "3.8" and python_version < "4" coveralls==1.11.1 ; python_version >= "3.8" and python_version < "4.0" dj-database-url==0.5.0 ; python_version >= "3.8" and python_version < "4.0" -django-postgres-setfield @ git+https://github.com/benperlman/django-postgres-setfield.git@5969124d3da75a5ce1e5b84806f2fd2f8b7b532e ; python_version >= "3.8" and python_version < "4.0" +django-ninja==1.2.0 ; python_version >= "3.8" and python_version < "4.0" django==4.2.11 ; python_version >= "3.8" and python_version < "4.0" djangorestframework==3.14.0 ; python_version >= "3.8" and python_version < "4.0" docopt==0.6.2 ; python_version >= "3.8" and python_version < "4.0" @@ -28,6 +29,8 @@ pluggy==0.13.1 ; python_version >= "3.8" and python_version < "4.0" psycopg2==2.9.2 ; python_version >= "3.8" and python_version < "4.0" py==1.11.0 ; python_version >= "3.8" and python_version < "4.0" pycodestyle==2.9.1 ; python_version >= "3.8" and python_version < "4.0" +pydantic-core==2.18.4 ; python_version >= "3.8" and python_version < "4.0" +pydantic==2.7.4 ; python_version >= "3.8" and python_version < "4.0" pyflakes==2.5.0 ; python_version >= "3.8" and python_version < "4.0" pyparsing==2.4.7 ; python_version >= "3.8" and python_version < "4.0" pytest-django==3.10.0 ; python_version >= "3.8" and python_version < "4.0" @@ -42,7 +45,7 @@ sqlparse==0.4.2 ; python_version >= "3.8" and python_version < "4.0" text-unidecode==1.3 ; python_version >= "3.8" and python_version < "4.0" toml==0.10.2 ; python_version >= "3.8" and python_version < "4.0" tomli==2.0.1 ; python_version >= "3.8" and python_version < "4.0" -typing-extensions==4.3.0 ; python_version >= "3.8" and python_version < "3.11" +typing-extensions==4.12.2 ; python_version >= "3.8" and python_version < "4.0" tzdata==2024.1 ; python_version >= "3.8" and python_version < "4.0" and sys_platform == "win32" urllib3==1.26.7 ; python_version >= "3.8" and python_version < "4" uuid==1.30 ; python_version >= "3.8" and python_version < "4.0" diff --git a/scripts/dummydata.py b/scripts/dummydata.py index 249f13bf..bcedaa23 100644 --- a/scripts/dummydata.py +++ 
b/scripts/dummydata.py @@ -20,18 +20,20 @@ Run this script with `--help` for a list of options. """ + import argparse import binascii +import json import os import sys import traceback from calendar import monthrange from collections import OrderedDict from datetime import date, datetime -from random import randint -from typing import Any, Dict, List, Tuple from itertools import groupby from operator import itemgetter +from random import randint +from typing import Any, Dict, List, Tuple from zoneinfo import ZoneInfo from dateutil.relativedelta import relativedelta @@ -120,7 +122,7 @@ float: lambda x: x, bool: lambda x: str(x).lower(), date: lambda x: str(x), - list: lambda x: f"{{{', '.join(x)}}}", + list: lambda x: json.dumps(x), } @@ -218,6 +220,7 @@ def account(self, **kwargs) -> Tuple[str, Tuple[Any]]: ) self.accounts.append(acct) + # print(f"RACE: {args.get('race')}") return insert( "home_account", diff --git a/server/urls.py b/server/urls.py index c1908bd4..1a3c0326 100644 --- a/server/urls.py +++ b/server/urls.py @@ -1,23 +1,48 @@ -"""server URL Configuration - -The `urlpatterns` list routes URLs to views. For more information please see: - https://docs.djangoproject.com/en/3.0/topics/http/urls/ -Examples: -Function views - 1. Add an import: from my_app import views - 2. Add a URL to urlpatterns: path('', views.home, name='home') -Class-based views - 1. Add an import: from other_app.views import Home - 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') -Including another URLconf - 1. Import the include() function: from django.urls import include, path - 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) -""" -from django.contrib import admin -from django.urls import include, path - -urlpatterns = [ - path("", include("home.urls")), - path("admin/doc/", include("django.contrib.admindocs.urls")), - path("admin/", admin.site.urls), -] +"""server URL Configuration + +The `urlpatterns` list routes URLs to views. For more information please see: + https://docs.djangoproject.com/en/3.0/topics/http/urls/ +Examples: +Function views + 1. Add an import: from my_app import views + 2. Add a URL to urlpatterns: path('', views.home, name='home') +Class-based views + 1. Add an import: from other_app.views import Home + 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') +Including another URLconf + 1. Import the include() function: from django.urls import include, path + 2. 
Add a URL to urlpatterns: path('blog/', include('blog.urls')) +""" + +from django.contrib import admin +from django.urls import include, path +from ninja import NinjaAPI + +from home.views.apiv2.appuser import router as appuser_router +from home.views.apiv2.contest import router as contest_router +from home.views.apiv2.dailywalk import router as daily_walk_router +from home.views.apiv2.device import router as device_router +from home.views.apiv2.export import router as export_router +from home.views.apiv2.intentionalwalk import router as intentional_walk_router +from home.views.apiv2.leaderboard import router as leaderboard_router +from home.views.apiv2.weeklygoal import router as weeklygoal_router +from home.views.apiv2.admin import router as admin_router + +api = NinjaAPI() +api.add_router("/appuser", appuser_router) +api.add_router("/contest", contest_router) +api.add_router("/device", device_router) +api.add_router("/dailywalk", daily_walk_router) +api.add_router("/export", export_router) +api.add_router("/intentionalwalk", intentional_walk_router) +api.add_router("/leaderboard", leaderboard_router) +api.add_router("/weeklygoal", weeklygoal_router) +api.add_router("/admin", admin_router) + + +urlpatterns = [ + path("", include("home.urls")), + path("admin/doc/", include("django.contrib.admindocs.urls")), + path("admin/", admin.site.urls), + path("api/v2/", api.urls), +] From 36c23d88dcaf98badd0196a332f7ee82441cd671 Mon Sep 17 00:00:00 2001 From: Patrick Yu Date: Thu, 4 Jul 2024 10:18:36 -0700 Subject: [PATCH 2/9] Fixed response schema validation error for date field --- home/views/apiv2/dailywalk.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/home/views/apiv2/dailywalk.py b/home/views/apiv2/dailywalk.py index 72ed5a3f..2dfd36ef 100644 --- a/home/views/apiv2/dailywalk.py +++ b/home/views/apiv2/dailywalk.py @@ -44,7 +44,7 @@ def get_daily_walks(request, account_id: str): return 200, {"daily_walks": list(daily_walks)} -@router.post("", response={201: DailyWalkInSchema}) +@router.post("", response={201: DailyWalkOutSchema}) @csrf_exempt def create_daily_walk(request, payload: DailyWalkInSchema): json_data = payload.dict() From 7385fa94c0f1609f3abe4bf8f6d2055678f632fa Mon Sep 17 00:00:00 2001 From: Patrick Yu Date: Fri, 5 Jul 2024 00:18:59 -0700 Subject: [PATCH 3/9] Fixed admin/users api and tests --- home/tests/integration/appuser/test_update.py | 2 +- home/tests/integration/views/web/test_data.py | 4 +- home/utils/generators.py | 2 +- home/views/api/admin.py | 24 ++- home/views/api/appuser.py | 2 +- home/views/api/appuserv2.py | 126 --------------- home/views/api/schemas/account.py | 145 ------------------ .../api/serializers/response_serializers.py | 4 +- home/views/apiv2/admin.py | 52 +++++++ home/views/apiv2/schemas/account.py | 7 +- home/views/apiv2/schemas/admin.py | 143 +++++++++++++++-- home/views/apiv2/schemas/contest.py | 13 +- home/views/apiv2/schemas/dailywalk.py | 12 +- home/views/apiv2/schemas/device.py | 11 +- home/views/apiv2/schemas/intentionalwalk.py | 7 +- home/views/apiv2/schemas/leaderboard.py | 6 +- home/views/apiv2/schemas/weeklygoal.py | 19 +-- 17 files changed, 236 insertions(+), 343 deletions(-) delete mode 100644 home/views/api/appuserv2.py delete mode 100644 home/views/api/schemas/account.py diff --git a/home/tests/integration/appuser/test_update.py b/home/tests/integration/appuser/test_update.py index 14ccc5af..8c8ed907 100644 --- a/home/tests/integration/appuser/test_update.py +++ b/home/tests/integration/appuser/test_update.py @@ -100,7 
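The new server/urls.py above mounts a single NinjaAPI instance at api/v2/ and attaches one router per resource. A minimal smoke-test sketch of that mount, assuming django-ninja's default behaviour of serving the OpenAPI document at <prefix>/openapi.json without authentication; the test class and assertions are illustrative only and not part of the patch:

# Hypothetical smoke test for the /api/v2/ mount; not part of the patch.
from django.test import Client, TestCase


class ApiV2MountTestCase(TestCase):
    def test_openapi_schema_is_served(self):
        client = Client()
        # NinjaAPI serves its OpenAPI document at <prefix>/openapi.json by default.
        response = client.get("/api/v2/openapi.json")
        self.assertEqual(response.status_code, 200)
        schema = response.json()
        # Each router added with api.add_router() contributes paths to the schema.
        self.assertIn("paths", schema)
        self.assertTrue(schema["paths"])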
+100,7 @@ def test_update_appuser_demographics(self): msg=fail_message, ) self.assertSetEqual( - user_obj.race, set(request_params["race"]), msg=fail_message + set(user_obj.race), set(request_params["race"]), msg=fail_message ) # Test updating a User's age diff --git a/home/tests/integration/views/web/test_data.py b/home/tests/integration/views/web/test_data.py index 5057f58a..239c6d77 100644 --- a/home/tests/integration/views/web/test_data.py +++ b/home/tests/integration/views/web/test_data.py @@ -7,6 +7,7 @@ from django.contrib.auth.models import User from django.test import Client, TestCase from freezegun import freeze_time +from pytz import utc from home.utils.generators import ( AccountGenerator, @@ -16,7 +17,6 @@ IntentionalWalkGenerator, ) from home.views.web.data import USER_AGG_CSV_BASE_HEADER -from pytz import utc class Login: @@ -145,7 +145,7 @@ def test_user_agg_csv_view(self): ) # Empty walk data self.assertEqual(row, expected_row) self.assertIn( - row["Race"], {"{'OT', 'BL'}", "{'BL', 'OT'}"} + row["Race"], {"['OT', 'BL']", "['BL', 'OT']"} ) # order is non-deterministic else: diff --git a/home/utils/generators.py b/home/utils/generators.py index bf21ac74..56b59557 100644 --- a/home/utils/generators.py +++ b/home/utils/generators.py @@ -57,7 +57,7 @@ def random_params(self): age=random.randint(10, 100), gender=gender_background, gender_other="Gender Queer" if gender_background == "OT" else None, - race=racial_background, + race=list(racial_background), race_other="Middle Eastern" if "OT" in racial_background else None, sexual_orien=random.choice([en.name for en in self.sexual_oriens]), is_latino=random.choice([en.name for en in self.ethnicity]), diff --git a/home/views/api/admin.py b/home/views/api/admin.py index cdf2c899..9c57f1a5 100644 --- a/home/views/api/admin.py +++ b/home/views/api/admin.py @@ -9,16 +9,22 @@ from django.db.models.functions import Concat, TruncDate from django.http import HttpRequest, HttpResponse, JsonResponse from django.views import View +from rest_framework import status +from rest_framework.response import Response from home.models import Account, Contest, DailyWalk from home.models.intentionalwalk import IntentionalWalk from home.models.leaderboard import Leaderboard -from home.views.api.histogram.serializers import (HistogramReqSerializer, - ValidatedHistogramReq) -from home.views.api.serializers.request_serializers import \ - GetUsersReqSerializer -from home.views.api.serializers.response_serializers import \ - GetUsersRespSerializer +from home.views.api.histogram.serializers import ( + HistogramReqSerializer, + ValidatedHistogramReq, +) +from home.views.api.serializers.request_serializers import ( + GetUsersReqSerializer, +) +from home.views.api.serializers.response_serializers import ( + GetUsersRespSerializer, +) from .utils import paginate, require_authn @@ -294,7 +300,7 @@ def get(self, request, *args, **kwargs): class AdminUsersView(View): http_method_names = ["get"] - # @require_authn + @require_authn def get(self, request, *args, **kwargs): serializer = GetUsersReqSerializer(data=request.GET) if not serializer.is_valid(): @@ -338,7 +344,9 @@ def update_user_dto(dto, iw_stats): for dto, iw_stat in zip(query, iw_query) ] resp = GetUsersRespSerializer(result_dto, many=True) - response = JsonResponse(resp.data, safe=False) + # response = JsonResponse(resp.data, safe=False) + response = JsonResponse(result_dto, safe=False) + if links: response.headers["Link"] = links diff --git a/home/views/api/appuser.py b/home/views/api/appuser.py index 
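AdminUsersView above now enforces authentication and pages its results, advertising further pages through a Link response header. A hedged sketch of how a caller might drive it with Django's test client, assuming a logged-in superuser session and the usual <url>; rel="next" Link convention from paginate(); the credentials and the example header value are placeholders:

# Illustrative only; the username/password are placeholders, not fixtures from the patch.
from django.test import Client

client = Client()
client.login(username="admin", password="admin")  # placeholder credentials
response = client.get("/api/admin/users", {"page": 1})
users = response.json()  # list of per-user dicts, one entry per matching Account
# paginate() exposes further pages via the Link header, e.g.
# '</api/admin/users?page=2>; rel="next"'
next_link = response.headers.get("Link")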
7d5d7d14..53f57b4e 100644 --- a/home/views/api/appuser.py +++ b/home/views/api/appuser.py @@ -272,7 +272,7 @@ def post(self, request, *args, **kwargs): "zip": account.zip, "age": account.age, "is_latino": account.is_latino, - "race": list(account.race), + "race": account.race, "race_other": account.race_other, "gender": account.gender, "gender_other": account.gender_other, diff --git a/home/views/api/appuserv2.py b/home/views/api/appuserv2.py deleted file mode 100644 index 9a3f89e1..00000000 --- a/home/views/api/appuserv2.py +++ /dev/null @@ -1,126 +0,0 @@ -import json -from datetime import date -from typing import List - -from django.core.exceptions import ObjectDoesNotExist -from django.shortcuts import get_object_or_404 -from ninja import Router -from ninja.errors import HttpError - -from home.models import Account, Device -from home.models.account import ( - SAN_FRANCISCO_ZIP_CODES, - GenderLabels, - IsLatinoLabels, - RaceLabels, - SexualOrientationLabels, -) -from home.views.api.schemas.account import AccountSchema, DeviceSchema - -router = Router() - - -# Determines whether Account is tester account, based on name prefix -def is_tester(name_field: str) -> bool: - possible_prefixes = ["tester-", "tester ", "tester_"] - return any( - [name_field.lower().startswith(prefix) for prefix in possible_prefixes] - ) - - -def update_account(account: Account, json_data: dict): - # Data fields vary based on registration screen - for attr, value in json_data.items(): - if attr != "email": - setattr(account, attr, value) - account.save() - - -@router.post("/appuser", response={201: AccountSchema}) -def create_appuser(request, payload: AccountSchema): - # Parse the body json - json_data = payload.dict() - # For a participating device - try: - # NOTE: Account id here maps to a device id. Perhaps the API - # definition could be changed in the future. - # Get the registered device if it exists - device = Device.objects.get(device_id=json_data["account_id"]) - # If it is an email update fail and return - if device.account.email.lower() != json_data["email"].lower(): - raise HttpError(400, "Email cannot be updated. Contact admin") - - # Otherwise, update the account's other details - account = Account.objects.get(email__iexact=json_data["email"]) - update_account(account, json_data) - return 201, {"account_id": device.device_id, **account.__dict__} - - # This implies that it is a new device - except Device.DoesNotExist: - # Check if the user account exists. 
If not, create it - try: - account = Account.objects.get(email__iexact=json_data["email"]) - update_account(account, json_data) - message = "Account updated successfully" - account_updated = True - except Account.DoesNotExist: - # Partially create account first, with required fields - account = Account.objects.create( - email=json_data["email"], - name=json_data["name"], - zip=json_data["zip"], - age=json_data["age"], - is_tester=is_tester(json_data["name"]), - is_sf_resident=json_data["zip"] in SAN_FRANCISCO_ZIP_CODES, - ) - account_updated = False - - # Create a new device object and link it to the account - device = Device.objects.create( - device_id=json_data["account_id"], account=account - ) - - # return 201, { - # "account_id": device.device_id, - # "name": account.name, - # "email": account.email, - # "zip": account.zip, - # "age": account.age, - # "is_latino": account.is_latino, - # "race": account.race, - # "race_other": account.race_other, - # "gender": account.gender, - # "gender_other": account.gender_other, - # "sexual_orien": account.sexual_orien, - # "sexual_orien_other": account.sexual_orien_other, - # } - - return 201, {"account_id": device.device_id, **account.__dict__} - - -# @router.get("/employees/{employee_id}", response=EmployeeOut) -# def get_employee(request, employee_id: int): -# employee = get_object_or_404(Employee, id=employee_id) -# return employee - - -# @router.get("/employees", response=List[EmployeeOut]) -# def list_employees(request): -# qs = Employee.objects.all() -# return qs - - -@router.put("/appuser/{account_id}") -def update_appuser(request, account_id: int, payload: AccountSchema): - account = get_object_or_404(Account, id=account_id) - for attr, value in payload.dict().items(): - setattr(account, attr, value) - account.save() - return account - - -@router.delete("/appuser/{account_id}") -def delete_appuser(request, account_id: int): - account = get_object_or_404(Account, id=account_id) - account.delete() - return {"success": True} diff --git a/home/views/api/schemas/account.py b/home/views/api/schemas/account.py deleted file mode 100644 index d102d506..00000000 --- a/home/views/api/schemas/account.py +++ /dev/null @@ -1,145 +0,0 @@ -from ninja import ModelSchema, Schema -from pydantic import field_validator -from typing import Optional - -from home.models.account import Account, SAN_FRANCISCO_ZIP_CODES -from home.models.device import Device - - -class DeviceSchema(ModelSchema): - class Meta: - model = Device - fields = ( - "device_id", - "account", - "created" - ) - - -# class AccountSchema(ModelSchema): -# account_id: str -# # is_test: bool -# # race = list - -# class Meta: -# model = Account -# fields = ( -# "name", -# "email", -# "zip", -# "age", -# "is_latino", -# "race", -# "race_other", -# "gender", -# "gender_other", -# "sexual_orien", -# "sexual_orien_other", -# ) - - # @validator("account_id") - # def redefine_account_id(cls, v): - # return v.device_id - -class AccountSchema(Schema): - account_id: str - name: str - email: str - zip: str - age: int - is_latino: Optional[str] = None - race: Optional[list] = None - race_other: Optional[str] = None - gender: Optional[str] = None - gender_other: Optional[str] = None - sexual_orien: Optional[str] = None - sexual_orien_other: Optional[str] = None - - # Check for valid zip code - @field_validator("zip") - def check_zip(cls, zipcode): - if zipcode not in SAN_FRANCISCO_ZIP_CODES: - raise ValueError("Invalid zip code") - # assert zipcode in SAN_FRANCISCO_ZIP_CODES, "Invalid zip code" - return 
zipcode - - # Check for valid age range - # @validator("age") - # def valid_ - # ): # Required field but existence checked in validate_request_json - # assert data["age"] > 1 and data["age"] < 200, "Invalid age" - # if data.get("is_latino") is not None: - # is_latino = data["is_latino"] - # assert ( - # is_latino in IsLatinoLabels.__members__ - # ), f"Invalid is latino or hispanic selection '{is_latino}'" - # if data.get("race") is not None: - # for item in data["race"]: - # assert ( - # item in RaceLabels.__members__ - # ), f"Invalid race selection '{item}'" - # if "OT" in data["race"]: - # assert ( - # len(data.get("race_other", "")) > 0 - # ), "Must specify 'other' race" - # else: - # assert ( - # data.get("race_other") is None - # ), "'race_other' should not be specified without race 'OT'" - # elif data.get("race_other") is not None: - # assert False, "'race_other' should not be specified without 'race'" - # if data.get("gender") is not None: - # gender = data["gender"] - # assert ( - # gender in GenderLabels.__members__ - # ), f"Invalid gender selection '{gender}'" - # if data["gender"] == "OT": - # assert ( - # len(data.get("gender_other", "")) > 0 - # ), "Must specify 'other' gender" - # else: - # assert ( - # data.get("gender_other") is None - # ), "'gender_other' should not be specified without 'OT'" - # elif data.get("gender_other") is not None: - # assert False, "'gender_other' should not be specified without 'gender'" - # if data.get("sexual_orien") is not None: - # sexual_orientation = data["sexual_orien"] - # assert ( - # sexual_orientation in SexualOrientationLabels.__members__ - # ), f"Invalid sexual orientation selection '{sexual_orientation}'" - # if data["sexual_orien"] == "OT": - # assert ( - # len(data.get("sexual_orien_other", "")) > 0 - # ), "Must specify 'other' sexual orientation" - # else: - # assert ( - # data.get("sexual_orien_other") is None - # ), "'sexual_orien_other' should not be specified without 'OT'" - # elif data.get("sexual_orien_other") is not None: - # assert ( - # False - # ), "'sexual_orien_other' should not be specified without 'gender'" - - - -# class AccountOut(ModelSchema): -# class Meta: -# model = Account -# fields = ( -# "name", -# "email", -# "zip", -# "age", -# "is_latino", -# "race", -# "race_other", -# "gender", -# "gender_other", -# "sexual_orien", -# "sexual_orien_other", -# ) - - -class Error(Schema): - message: str diff --git a/home/views/api/serializers/response_serializers.py b/home/views/api/serializers/response_serializers.py index e4ecfbd8..874ae233 100644 --- a/home/views/api/serializers/response_serializers.py +++ b/home/views/api/serializers/response_serializers.py @@ -6,7 +6,9 @@ to JSON-compatible data types that can be sent in the HTTP response, and to clearly document the structure of the response data. 
""" + from rest_framework import serializers + from home.models import Account @@ -34,4 +36,4 @@ class GetUsersRespSerializer(serializers.ModelSerializer): class Meta: model = Account - fields = "__all__" + fields = ("name", "email", "age", "zip", "created") diff --git a/home/views/apiv2/admin.py b/home/views/apiv2/admin.py index 19feed23..03e62a51 100644 --- a/home/views/apiv2/admin.py +++ b/home/views/apiv2/admin.py @@ -7,12 +7,17 @@ from django.db import connection from django.db.models import CharField, Count, Q, Sum, Value from django.db.models.functions import Concat, TruncDate +from django.http import HttpRequest, HttpResponse, JsonResponse from django.views.decorators.csrf import csrf_exempt from ninja import Query, Router from ninja.errors import HttpError, ValidationError from ninja.security import django_auth_superuser from home.models import Account, Contest, DailyWalk +from home.views.api.serializers.response_serializers import ( + GetUsersRespSerializer, +) +from home.views.api.utils import paginate from .histogram.histogram import Histogram from .schemas.admin import ( @@ -25,6 +30,8 @@ HomeGraphFilter, UsersByZipInSchema, UsersByZipOutSchema, + UsersInSchema, + UsersOutSchema, ) logger = logging.getLogger(__name__) @@ -343,6 +350,51 @@ def get_walks_steps_daily(request, qs: Query[HomeGraphFilter]): return results +@router.get( + "/users", + response={200: UsersOutSchema}, + exclude_none=True, + auth=django_auth_superuser, +) +def get_users( + request: HttpRequest, response: HttpResponse, qs: Query[UsersInSchema] +): + + contest_id = qs.contest_id + filters = qs.filter_dict["filters"] + order_by = qs.filter_dict["order_by"] + page = qs.filter_dict["page"] + per_page = qs.filter_dict["per_page"] + + annotate = qs.filter_dict["annotate"] + intentionalwalk_annotate = qs.filter_dict["intentionalwalk_annotate"] + + query = ( + Account.objects.filter(filters) + .values("id", "name", "email", "age", "zip", "created") + .annotate(**annotate) + .order_by(*order_by) + ) + query, links = paginate(request, query, page, per_page) + + iw_query = ( + Account.objects.filter(id__in=(row["id"] for row in query)) + .values("id") + .annotate(**intentionalwalk_annotate) + .order_by(*order_by) + ) + + result_dto = [ + qs.update_user_dto(dto, iw_stat) + for dto, iw_stat in zip(query, iw_query) + ] + + if links: + response.headers["Link"] = links + + return 200, {"users": result_dto} + + @router.get( "/contests", response={200: List[ContestOutSchema]}, diff --git a/home/views/apiv2/schemas/account.py b/home/views/apiv2/schemas/account.py index 65807005..d8ae088c 100644 --- a/home/views/apiv2/schemas/account.py +++ b/home/views/apiv2/schemas/account.py @@ -1,7 +1,7 @@ from typing import List from ninja import Field, Schema -from pydantic import field_validator, model_validator +from pydantic import ConfigDict, field_validator, model_validator from typing_extensions import Self from home.models.account import ( @@ -14,6 +14,8 @@ class AccountSchema(Schema): + model_config = ConfigDict(extra="forbid") + account_id: str = Field(description="Account id of the user's account") name: str = Field(min_length=1, max_length=250, description="User's name") email: str = Field(decription="Email which uniquely identifies an account") @@ -79,9 +81,6 @@ class AccountSchema(Schema): description="Free-form text field for 'sexual_orien' value 'OT'", ) - class Config(Schema.Config): - extra = "forbid" - # Check for valid zip code @field_validator("zip") def check_zip(cls, zipcode) -> str: diff --git 
a/home/views/apiv2/schemas/admin.py b/home/views/apiv2/schemas/admin.py index ab3a6c41..a7ac46e6 100644 --- a/home/views/apiv2/schemas/admin.py +++ b/home/views/apiv2/schemas/admin.py @@ -1,12 +1,22 @@ -from datetime import date +from datetime import date, timedelta from typing import Dict, List, Optional -from django.db.models import QuerySet -from ninja import Field, FilterSchema, Schema +from django.db.models import ( + BooleanField, + Count, + ExpressionWrapper, + F, + Q, + QuerySet, + Sum, +) +from ninja import Field, FilterSchema, ModelSchema, Schema from ninja.errors import ValidationError -from pydantic import field_validator, model_validator +from pydantic import computed_field, field_validator, model_validator from typing_extensions import Self +from home.models import Account, Contest + class AdminMeSchema(Schema): id: int = Field(description="Current logged in user's id") @@ -73,22 +83,120 @@ class ContestOutSchema(Schema): class UsersInSchema(Schema): contest_id: str = Field( default=None, - description="""The ID of the contest to filter by. -
This field is mutually exclusive with the date fields. -
For distance and step metrics, this will restrict the records - to the values recorded during the contest period's start and end date. -
For account metrics, this will restrict the records to the accounts that participated in the contest.""", + description="""The ID of the contest to filter by. + Providing this also will add additional metrics related to te contest.""", ) is_tester: bool = Field( default=False, description="If true, will only return records related to tester accounts.", ) - order_by: str = None - page: int = None - query: str = None + order_by: str = Field( + default=None, + description="""The field to order the results by. Prefix with '-' to order in descending order. + The secondary sort and default sort will be lexicographically, the 'name'.""", + ) + page: int = Field( + default=1, description="The page number to return. Defaults to 1." + ) + query: str = Field( + default=None, + description="Query string to filter for containment in the name or email.", + ) + @computed_field + @property + def filter_dict(self) -> int: + filters, annotate, intentionalwalk_filter = None, None, None + if self.contest_id: + contest = Contest.objects.get(pk=self.contest_id) + dailywalk_filter = Q( + dailywalk__date__range=(contest.start, contest.end) + ) -class UsersOutSchema(Schema): + filters = Q(contests__contest_id=self.contest_id) + annotate = { + "is_new": ExpressionWrapper( + Q( + created__gte=contest.start_promo, + created__lt=contest.end + timedelta(days=1), + ), + output_field=BooleanField(), + ), + "dw_count": Count("dailywalk", filter=dailywalk_filter), + "dw_steps": Sum("dailywalk__steps", filter=dailywalk_filter), + "dw_distance": Sum( + "dailywalk__distance", filter=dailywalk_filter + ), + } + intentionalwalk_filter = Q( + intentionalwalk__start__gte=contest.start, + intentionalwalk__start__lt=contest.end + timedelta(days=1), + ) + else: + filters = Q() + annotate = { + "dw_count": Count("dailywalk"), + "dw_steps": Sum("dailywalk__steps"), + "dw_distance": Sum("dailywalk__distance"), + } + intentionalwalk_filter = Q() + + intentionalwalk_annotate = { + "iw_count": Count( + "intentionalwalk", filter=intentionalwalk_filter + ), + "iw_steps": Sum( + "intentionalwalk__steps", filter=intentionalwalk_filter + ), + "iw_distance": Sum( + "intentionalwalk__distance", filter=intentionalwalk_filter + ), + "iw_time": Sum( + "intentionalwalk__walk_time", filter=intentionalwalk_filter + ), + } + + # filter to show users vs testers + filters &= Q(is_tester=self.is_tester) + + # filter by search query + if self.query: + filters &= Q( + Q(name__icontains=self.query) | Q(email__icontains=self.query) + ) + + # set ordering + order = [] + if self.order_by: + desc = self.order_by.startswith("-") + field = F(self.order_by[1:] if desc else self.order_by) + order.append( + field.desc(nulls_last=True) + if desc + else field.asc(nulls_first=False) + ) + order.append(F("name")) + + return { + "annotate": annotate, + "intentionalwalk_annotate": intentionalwalk_annotate, + "contest_id": self.contest_id, + "filters": filters, + "order_by": order, + "page": self.page, + "per_page": 25, + } + + # @property + def update_user_dto(self, dto, iw_stats): + dto.update(iw_stats) + # at this point, we have enough info to determine if user is "active" + if self.contest_id: + dto["is_active"] = dto["dw_count"] > 0 or dto["iw_count"] > 0 + return dto + + +class UsersOut(ModelSchema): dw_count: int = Field(description="Total number of daily walk users") dw_steps: int = Field( description="Total number of steps users took on daily walks" @@ -117,6 +225,15 @@ class UsersOutSchema(Schema): ) is_active: bool = None + class Meta: + model = Account + # 
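The filter_dict property above uses pydantic v2's computed_field to package the Q filters, annotations, ordering and paging options into a dict that get_users then applies to the Account queryset. A small standalone sketch of the computed_field mechanism itself, with a hypothetical PageParams model; the derived value is included when the model is dumped, just as filter_dict is derived from contest_id, is_tester, query and order_by:

# Hypothetical model, only to illustrate computed_field; not from the patch.
from pydantic import BaseModel, computed_field


class PageParams(BaseModel):
    page: int = 1
    per_page: int = 25

    @computed_field
    @property
    def offset(self) -> int:
        # Derived from the declared inputs, analogous to filter_dict deriving
        # its filters and annotations from the query-string fields.
        return (self.page - 1) * self.per_page


print(PageParams(page=3).model_dump())
# -> {'page': 3, 'per_page': 25, 'offset': 50}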
fields = "__all__" + fields = ("name", "email", "age", "zip", "created") + + +class UsersOutSchema(Schema): + users: List[UsersOut] + class UsersByZipInSchema(Schema): contest_id: str = Field( diff --git a/home/views/apiv2/schemas/contest.py b/home/views/apiv2/schemas/contest.py index 58230a34..4a7531db 100644 --- a/home/views/apiv2/schemas/contest.py +++ b/home/views/apiv2/schemas/contest.py @@ -1,14 +1,12 @@ from datetime import date -from typing import List, Optional -from ninja import Field, ModelSchema, Schema -from pydantic import ValidationInfo, field_validator, model_validator -from typing_extensions import Self - -from home.models.contest import Contest +from ninja import Field, Schema +from pydantic import ConfigDict class ContestSchema(Schema): + model_config = ConfigDict(extra="forbid") + contest_id: str = Field(description="Contest identifier") start_baseline: date | None = Field( description="Start of baseline period (prior to contest start)" @@ -17,9 +15,6 @@ class ContestSchema(Schema): start: date = Field(description="Contest start date") end: date = Field(decription="Contest end date") - class Config(Schema.Config): - extra = "forbid" - class ErrorSchema(Schema): message: str = Field( diff --git a/home/views/apiv2/schemas/dailywalk.py b/home/views/apiv2/schemas/dailywalk.py index c6da9e32..fb93c240 100644 --- a/home/views/apiv2/schemas/dailywalk.py +++ b/home/views/apiv2/schemas/dailywalk.py @@ -1,21 +1,22 @@ from datetime import date as date_type from ninja import Field, Schema -from pydantic import computed_field +from pydantic import ConfigDict, computed_field class DailyWalkIn(Schema): + model_config = ConfigDict(extra="forbid") + date: str = Field( description="The specific date for which the steps are recorded" ) steps: int = Field(ge=0, description="Number of steps recorded") distance: float = Field(ge=0, description="Total distance covered") - class Config(Schema.Config): - extra = "forbid" - class DailyWalkInSchema(Schema): + model_config = ConfigDict(extra="forbid") + account_id: str = Field( description="Unique identifier for user's account." ) @@ -23,9 +24,6 @@ class DailyWalkInSchema(Schema): description="List of record daily walks: date, steps, distance." 
) - class Config(Schema.Config): - extra = "forbid" - class DailyWalkOut(Schema): date: date_type = Field( diff --git a/home/views/apiv2/schemas/device.py b/home/views/apiv2/schemas/device.py index 8a9f6526..14c3b47e 100644 --- a/home/views/apiv2/schemas/device.py +++ b/home/views/apiv2/schemas/device.py @@ -1,10 +1,13 @@ from datetime import datetime from ninja import Field, Schema +from pydantic import ConfigDict from typing_extensions import Self class DeviceInSchema(Schema): + model_config = ConfigDict(extra="forbid") + device_id: str = Field( max_length=250, description="""A unique id generated by the app when it is first installed" @@ -52,11 +55,10 @@ class DeviceInSchema(Schema): """, ) - class Config(Schema.Config): - extra = "forbid" - class DeviceOutSchema(Schema): + model_config = ConfigDict(extra="forbid") + device_id: str = Field( max_length=250, description="""A unique id generated by the app when it is first installed" @@ -107,9 +109,6 @@ class DeviceOutSchema(Schema): """, ) - class Config(Schema.Config): - extra = "forbid" - class ErrorSchema(Schema): message: str = Field( diff --git a/home/views/apiv2/schemas/intentionalwalk.py b/home/views/apiv2/schemas/intentionalwalk.py index 0002819c..111082a2 100644 --- a/home/views/apiv2/schemas/intentionalwalk.py +++ b/home/views/apiv2/schemas/intentionalwalk.py @@ -2,10 +2,12 @@ from typing import List, Optional from ninja import Field, Schema -from pydantic import computed_field +from pydantic import ConfigDict, computed_field class IntentionalWalkInBaseSchema(Schema): + model_config = ConfigDict(extra="forbid") + event_id: str = Field( max_length=250, description="v4 random uuid generated on the client.", @@ -22,9 +24,6 @@ class IntentionalWalkInBaseSchema(Schema): ) distance: float = Field(ge=0, description="Total distance covered.") - class Config(Schema.Config): - extra = "forbid" - class IntentionalWalkInSchema(Schema): account_id: str = Field( diff --git a/home/views/apiv2/schemas/leaderboard.py b/home/views/apiv2/schemas/leaderboard.py index 85c9420c..a4ecb581 100644 --- a/home/views/apiv2/schemas/leaderboard.py +++ b/home/views/apiv2/schemas/leaderboard.py @@ -1,10 +1,13 @@ from typing import Optional from ninja import Field, Schema +from pydantic import ConfigDict from typing_extensions import Self class LeaderboardUserSchema(Schema): + model_config = ConfigDict(extra="forbid") + account_id: int = Field( desription="Account id of the account the data is linked to." ) @@ -15,9 +18,6 @@ class LeaderboardUserSchema(Schema): description="Device id of the device the data is coming from.", ) - class Config(Schema.Config): - extra = "forbid" - class LeaderboardSchema(Schema): leaderboard: list[LeaderboardUserSchema] = Field( diff --git a/home/views/apiv2/schemas/weeklygoal.py b/home/views/apiv2/schemas/weeklygoal.py index 9da0d659..49660cb4 100644 --- a/home/views/apiv2/schemas/weeklygoal.py +++ b/home/views/apiv2/schemas/weeklygoal.py @@ -1,12 +1,12 @@ from datetime import date -from typing import Optional from ninja import Field, Schema -from pydantic import field_validator, model_validator -from typing_extensions import Self +from pydantic import ConfigDict class WeeklyGoalIn(Schema): + model_config = ConfigDict(extra="forbid") + start_of_week: str = Field( description="The start of the week for the goal." ) @@ -15,11 +15,10 @@ class WeeklyGoalIn(Schema): ge=0, description="Number of days per week to reach goal." 
) - class Config(Schema.Config): - extra = "forbid" - class WeeklyGoalInSchema(Schema): + model_config = ConfigDict(extra="forbid") + account_id: str = Field( description="Account id of the account the data is linked to." ) @@ -27,9 +26,6 @@ class WeeklyGoalInSchema(Schema): description="Start week, steps, and days of the weekly goal." ) - class Config(Schema.Config): - extra = "forbid" - class WeeklyGoalOut(Schema): start_of_week: date = Field( @@ -51,13 +47,12 @@ class WeeklyGoalOutSchema(Schema): class WeeklyGoalListInSchema(Schema): + model_config = ConfigDict(extra="forbid") + account_id: str = Field( description="Account id of the account the data is linked to." ) - class Config(Schema.Config): - extra = "forbid" - class WeeklyGoalOutList(Schema): id: int = Field(description="Unique id for the set weekly goal.") From fb26f4fc8a2e8a25be3795ce1efb456f1bba3bac Mon Sep 17 00:00:00 2001 From: Patrick Yu Date: Fri, 5 Jul 2024 00:32:01 -0700 Subject: [PATCH 4/9] Fixed admin/users serializer --- home/views/api/admin.py | 3 +-- .../api/serializers/response_serializers.py | 17 ++++++++++++++++- 2 files changed, 17 insertions(+), 3 deletions(-) diff --git a/home/views/api/admin.py b/home/views/api/admin.py index 9c57f1a5..7dd215dc 100644 --- a/home/views/api/admin.py +++ b/home/views/api/admin.py @@ -344,8 +344,7 @@ def update_user_dto(dto, iw_stats): for dto, iw_stat in zip(query, iw_query) ] resp = GetUsersRespSerializer(result_dto, many=True) - # response = JsonResponse(resp.data, safe=False) - response = JsonResponse(result_dto, safe=False) + response = JsonResponse(resp.data, safe=False) if links: response.headers["Link"] = links diff --git a/home/views/api/serializers/response_serializers.py b/home/views/api/serializers/response_serializers.py index 874ae233..560550c2 100644 --- a/home/views/api/serializers/response_serializers.py +++ b/home/views/api/serializers/response_serializers.py @@ -36,4 +36,19 @@ class GetUsersRespSerializer(serializers.ModelSerializer): class Meta: model = Account - fields = ("name", "email", "age", "zip", "created") + fields = ( + "name", + "email", + "age", + "zip", + "created", + "dw_count", + "dw_steps", + "dw_distance", + "iw_count", + "iw_steps", + "iw_distance", + "iw_time", + "is_new", + "is_active", + ) From c44d75b70b7e617f98c6779c0d9be67c3fbb2922 Mon Sep 17 00:00:00 2001 From: Patrick Yu Date: Sun, 7 Jul 2024 17:47:02 -0700 Subject: [PATCH 5/9] Added more testing for api v1 --- home/models/account.py | 6 - home/tests/integration/appuser/test_create.py | 55 +++++++ home/tests/integration/appuser/test_delete.py | 78 ++++++++++ home/tests/integration/appuser/test_update.py | 77 ++++++++++ .../integration/dailywalk/test_create.py | 104 ++++++++++++- home/tests/integration/dailywalk/test_get.py | 96 ++++++++++++ .../intentionalwalk/test_create.py | 122 ++++++++++++++- .../integration/intentionalwalk/test_get.py | 97 ++++++++++++ .../tests/integration/views/api/test_admin.py | 51 +++++++ .../integration/views/api/test_export.py | 17 ++- home/tests/integration/views/api/utils.py | 5 +- .../integration/weeklygoal/test_create.py | 74 +++++++++ home/tests/integration/weeklygoal/test_get.py | 78 +++++++++- home/tests/unit/api/test_appuser.py | 4 +- home/tests/unit/api/test_leaderboard.py | 143 +++++++++++++++++- home/tests/unit/test_histogram.py | 37 ++++- home/views/api/admin.py | 2 - home/views/api/api.py | 0 home/views/api/export.py | 9 +- 19 files changed, 1021 insertions(+), 34 deletions(-) delete mode 100644 home/views/api/api.py diff --git 
a/home/models/account.py b/home/models/account.py index 1c0c2d4f..6a422f3a 100644 --- a/home/models/account.py +++ b/home/models/account.py @@ -152,11 +152,5 @@ class Account(models.Model): def __str__(self): return f"{self.name} | {self.email}" - def set_race(self, x): - self.foo = json.dumps(x) - - def get_race(self): - return json.loads(self.foo) - class Meta: ordering = ("-created",) diff --git a/home/tests/integration/appuser/test_create.py b/home/tests/integration/appuser/test_create.py index f67920c2..8adad512 100644 --- a/home/tests/integration/appuser/test_create.py +++ b/home/tests/integration/appuser/test_create.py @@ -299,3 +299,58 @@ def test_create_appuser_failure_missing_field_device_id(self): "Required input 'account_id' missing in the request", msg=fail_message, ) + + # Test invalid method + def test_create_appuser_invalid_method(self): + # Required fields for user creation + request_params = self.request_params.copy() + + # Test not allowed get method + response = self.client.get( + path=self.url, data=request_params, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test not allowed patch method + response = self.client.patch( + path=self.url, data=request_params, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test not allowed delete method + response = self.client.delete( + path=self.url, + data=request_params["account_id"], + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) diff --git a/home/tests/integration/appuser/test_delete.py b/home/tests/integration/appuser/test_delete.py index a22edad1..6a4f133a 100644 --- a/home/tests/integration/appuser/test_delete.py +++ b/home/tests/integration/appuser/test_delete.py @@ -166,3 +166,81 @@ def test_delete_user_failure_incorrect_params(self): expected_msg="Required input 'account_id' missing in the request", ) self.check_users_still_exist([self.account_id1, self.account_id2]) + + # Test invalid method - patch + def test_delete_user_invalid_method(self): + # Test get method + response = self.client.get( + path=self.del_url, + data={"account_id": self.account_id1}, + content_type=self.content_type, + ) + + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + 
msg=fail_message, + ) + + # Test post method + response = self.client.post( + path=self.del_url, + data={"account_id": self.account_id1}, + content_type=self.content_type, + ) + + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test put method + response = self.client.put( + path=self.del_url, + data={"account_id": self.account_id1}, + content_type=self.content_type, + ) + + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test patch method + response = self.client.patch( + path=self.del_url, + data={"account_id": self.account_id1}, + content_type=self.content_type, + ) + + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) diff --git a/home/tests/integration/appuser/test_update.py b/home/tests/integration/appuser/test_update.py index 8c8ed907..2cef8624 100644 --- a/home/tests/integration/appuser/test_update.py +++ b/home/tests/integration/appuser/test_update.py @@ -195,3 +195,80 @@ def test_update_appuser_email(self): "Email cannot be updated. 
Contact admin", msg=fail_message, ) + + # Test updating User's age without providing account_id + # This should return a response with status "error" and a fail message + def test_update_appuser_missing_account_id(self): + # Required fields for user creation + # Remove the account_id field + request_params = {**self.request_params, "age": 88} + del request_params["account_id"] + + # Register the user + response = self.client.put( + path=self.url, data=request_params, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Required input 'account_id' missing in the request", + msg=fail_message, + ) + + # Test invalid method - patch + def test_update_appuser_invalid_methods(self): + # Required fields for user creation + request_params = {**self.request_params, "age": 88} + + # Test get method + response = self.client.get( + path=self.url, data=request_params, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test patch method + response = self.client.patch( + path=self.url, data=request_params, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test delete method + response = self.client.delete( + path=self.url, data=request_params, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) diff --git a/home/tests/integration/dailywalk/test_create.py b/home/tests/integration/dailywalk/test_create.py index afeec1fd..b122c3bb 100644 --- a/home/tests/integration/dailywalk/test_create.py +++ b/home/tests/integration/dailywalk/test_create.py @@ -1,5 +1,6 @@ from django.test import Client, TestCase from freezegun import freeze_time + from home.models import Contest, Device @@ -199,10 +200,10 @@ def test_create_dailywalk_invalidaccount(self): msg=fail_message, ) - # Test creation of a daily walk with a missing field - def test_create_dailywalk_missing_steps(self): + # Test creation of a daily walk with a missing daily_walks field + def test_create_dailywalk_missing_daily_walks(self): - del self.request_params["daily_walks"][0]["steps"] + del self.request_params["daily_walks"] # Send the request response = self.client.post( @@ -216,6 +217,29 @@ def test_create_dailywalk_missing_steps(self): response_data = response.json() fail_message = 
f"Server response - {response_data}" self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Required input 'daily_walks' missing in the request", + msg=fail_message, + ) + + # Test creation of a daily walk with a missing field + def test_create_dailywalk_missing_steps(self): + request_params = self.request_params.copy() + del request_params["daily_walks"][0]["steps"] + + # Send the request + response = self.client.post( + path=self.url, + data=request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) self.assertEqual( response_data["message"], "Required input 'steps' missing in the request", @@ -269,3 +293,77 @@ def test_bulk_create_dailywalk(self): self.bulk_request_params["daily_walks"][i]["distance"], msg=fail_message, ) + + # Test invalid method + def test_create_dailywalk_invalid_methods(self): + # Test not allowed get method + response = self.client.get( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test not allowed patch method + response = self.client.patch( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test not allowed put method + response = self.client.put( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test not allowed delete method + response = self.client.delete( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) diff --git a/home/tests/integration/dailywalk/test_get.py b/home/tests/integration/dailywalk/test_get.py index 8d59225c..2a283637 100644 --- a/home/tests/integration/dailywalk/test_get.py +++ b/home/tests/integration/dailywalk/test_get.py @@ -87,6 +87,28 @@ def test_dailywalk_get_failure(self): 
msg=fail_message, ) + def test_dailywalk_get_missing_account_id(self): + request_params = self.request_params.copy() + del request_params["account_id"] + + # Send the request + response = self.client.post( + path=self.url, + data=request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Required input 'account_id' missing in the request", + msg=fail_message, + ) + def test_dailywalk_get(self): response = self.client.post( path=self.url, @@ -315,3 +337,77 @@ def test_dailywalk_get_aggregated(self): ), msg=fail_message, ) + + # Test invalid method + def test_dailywalk_get_invalid_methods(self): + # Test not allowed get method + response = self.client.get( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test not allowed patch method + response = self.client.patch( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test not allowed put method + response = self.client.put( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test not allowed delete method + response = self.client.delete( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) diff --git a/home/tests/integration/intentionalwalk/test_create.py b/home/tests/integration/intentionalwalk/test_create.py index 845e7d89..1121bd86 100644 --- a/home/tests/integration/intentionalwalk/test_create.py +++ b/home/tests/integration/intentionalwalk/test_create.py @@ -130,7 +130,7 @@ def test_create_intentionalwalk_invalidaccount(self): msg=fail_message, ) - # Test creation of a intentional walk with a missing field + # Test creation of a intentional walk with a missing steps field def 
test_create_intentionalwalk_missing_steps(self): del self.request_params["intentional_walks"][0]["steps"] @@ -152,3 +152,123 @@ def test_create_intentionalwalk_missing_steps(self): "Required input 'steps' missing in the request", msg=fail_message, ) + + # Test creation of a intentional walk with a missing account_id field + def test_create_intentionalwalk_missing_account_id(self): + + del self.request_params["account_id"] + + # Send the request + response = self.client.post( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Required input 'account_id' missing in the request", + msg=fail_message, + ) + + # Test creation of a intentional walk with a missing intentional_walks field + def test_create_intentionalwalk_missing_intentional_walks(self): + + del self.request_params["intentional_walks"] + + # Send the request + response = self.client.post( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Required input 'intentional_walks' missing in the request", + msg=fail_message, + ) + + # Test invalid method + def test_create_intentionalwalk_invalid_methods(self): + # Test not allowed get method + response = self.client.get( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test not allowed patch method + response = self.client.patch( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test not allowed put method + response = self.client.put( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test not allowed delete method + response = self.client.delete( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful 
response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) diff --git a/home/tests/integration/intentionalwalk/test_get.py b/home/tests/integration/intentionalwalk/test_get.py index 5dd6d86a..c88428d9 100644 --- a/home/tests/integration/intentionalwalk/test_get.py +++ b/home/tests/integration/intentionalwalk/test_get.py @@ -447,3 +447,100 @@ def test_intentionalwalk_get_aggregated(self): ], msg=fail_message, ) + + # Test request with a missing account_id field + def test_intentionalwalk_get_missing_account_id(self): + + del self.request_params["account_id"] + + # Send the request + response = self.client.post( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Required input 'account_id' missing in the request", + msg=fail_message, + ) + + # Test invalid method + def test_intentionalwalk_get_invalid_methods(self): + # Test not allowed get method + response = self.client.get( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test not allowed patch method + response = self.client.patch( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test not allowed put method + response = self.client.put( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test not allowed delete method + response = self.client.delete( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + 
msg=fail_message, + ) diff --git a/home/tests/integration/views/api/test_admin.py b/home/tests/integration/views/api/test_admin.py index 04ba1438..4896b171 100644 --- a/home/tests/integration/views/api/test_admin.py +++ b/home/tests/integration/views/api/test_admin.py @@ -1,5 +1,6 @@ import logging from random import seed + from django.test import Client, TestCase from .utils import Login, generate_test_data @@ -37,6 +38,11 @@ def test_get_me(self): def test_get_home(self): c = Client() + # when unauthenticated, returns an empty response + response = c.get("/api/admin/home") + self.assertEqual(response.status_code, 204) + + # log in self.assertTrue(Login.login(c)) response = c.get("/api/admin/home") data = response.json() @@ -210,11 +216,17 @@ def test_get_contests(self): def test_get_users(self): c = Client() self.assertTrue(Login.login(c)) + response = c.get("/api/admin/users") data = response.json() self.assertEqual(response.status_code, 200) self.assertEqual(len(data), 5) # 6 accounts - 1 tester + response = c.get( + f"/api/admin/users?contest_id={self.contest0_id}&is_tester=invalid" + ) + self.assertEqual(response.status_code, 422) + response = c.get(f"/api/admin/users?contest_id={self.contest0_id}") self.assertEqual(response.status_code, 200) data = response.json() @@ -274,7 +286,22 @@ def test_get_users(self): def test_get_users_by_zip(self): c = Client() + # when unauthenticated, returns status code 401 + response = c.get("/api/admin/users/zip") + self.assertEqual(response.status_code, 401) + + # authenticated self.assertTrue(Login.login(c)) + + response = c.get(f"/api/admin/users/zip") + data = response.json() + self.assertEqual( + data, + { + "total": {"94102": 1, "94103": 2, "94104": 2}, + }, + ) + response = c.get(f"/api/admin/users/zip?contest_id={self.contest0_id}") data = response.json() self.assertEqual( @@ -287,7 +314,19 @@ def test_get_users_by_zip(self): def test_get_users_active_by_zip(self): c = Client() + # when unauthenticated, returns status code 401 + response = c.get( + f"/api/admin/users/zip/active?contest_id={self.contest0_id}" + ) + self.assertEqual(response.status_code, 401) + + # authenticated self.assertTrue(Login.login(c)) + + # no contest_id given + response = c.get(f"/api/admin/users/zip/active") + self.assertEqual(response.status_code, 422) + response = c.get( f"/api/admin/users/zip/active?contest_id={self.contest0_id}" ) @@ -302,7 +341,19 @@ def test_get_users_active_by_zip(self): def test_get_users_median_steps_by_zip(self): c = Client() + # when unauthenticated, returns status code 401 + response = c.get( + f"/api/admin/users/zip/steps?contest_id={self.contest0_id}" + ) + self.assertEqual(response.status_code, 401) + + # authenticated self.assertTrue(Login.login(c)) + + # no contest_id given + response = c.get(f"/api/admin/users/zip/steps") + self.assertEqual(response.status_code, 422) + response = c.get( f"/api/admin/users/zip/steps?contest_id={self.contest0_id}" ) diff --git a/home/tests/integration/views/api/test_export.py b/home/tests/integration/views/api/test_export.py index 3ff2847b..770c9dc6 100644 --- a/home/tests/integration/views/api/test_export.py +++ b/home/tests/integration/views/api/test_export.py @@ -1,12 +1,12 @@ import csv import io import logging - from datetime import date, timedelta from django.test import Client, TestCase from home.views.api.export import CSV_COLUMNS + from .utils import Login, generate_test_data logger = logging.getLogger(__name__) @@ -75,3 +75,18 @@ def test_export_users(self): self.assertEqual(rows[3]["Total 
Steps During Contest"], "") self.assertEqual(rows[3]["Total Recorded Walks During Contest"], "0") self.assertEqual(rows[3]["Total Recorded Steps During Contest"], "") + + def test_export_users_missing_contest_id(self): + c = Client() + self.assertTrue(Login.login(c)) + + response = c.get("/api/export/users?contest_id=") + # Check for a successful response by the server + self.assertEqual(response.status_code, 422) + + def test_export_users_unauthenticated(self): + c = Client() + + response = c.get(f"/api/export/users?contest_id={self.contest0_id}") + # Check for a successful response by the server + self.assertEqual(response.status_code, 401) diff --git a/home/tests/integration/views/api/utils.py b/home/tests/integration/views/api/utils.py index 84ebe8f4..67af1814 100644 --- a/home/tests/integration/views/api/utils.py +++ b/home/tests/integration/views/api/utils.py @@ -1,12 +1,11 @@ import math - from datetime import date, datetime, timedelta -from freezegun import freeze_time -from pytz import utc from django.contrib.auth.models import User from django.test import Client from django.utils import timezone +from freezegun import freeze_time +from pytz import utc from home.utils.generators import ( AccountGenerator, diff --git a/home/tests/integration/weeklygoal/test_create.py b/home/tests/integration/weeklygoal/test_create.py index f115229e..21d03f57 100644 --- a/home/tests/integration/weeklygoal/test_create.py +++ b/home/tests/integration/weeklygoal/test_create.py @@ -250,3 +250,77 @@ def test_create_weeklygoal_wihtout_weeklygoal_steps(self): "Required input 'steps' missing in the request", msg=fail_message, ) + + # Test invalid methods + def test_create_weeklygoal_invalid_methods(self): + # Test not allowed get method + response = self.client.get( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test not allowed patch method + response = self.client.patch( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test not allowed put method + response = self.client.put( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test not allowed delete method + response = self.client.delete( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the 
response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) diff --git a/home/tests/integration/weeklygoal/test_get.py b/home/tests/integration/weeklygoal/test_get.py index 50a698a2..00c3a77a 100644 --- a/home/tests/integration/weeklygoal/test_get.py +++ b/home/tests/integration/weeklygoal/test_get.py @@ -1,7 +1,7 @@ -from django.test import Client, TestCase from django.forms.models import model_to_dict +from django.test import Client, TestCase -from home.models import WeeklyGoal, Device +from home.models import Device, WeeklyGoal class ApiTestCase(TestCase): @@ -148,3 +148,77 @@ def test_weeklygoal_get_failure_missing_account_id(self): "Required input 'account_id' missing in the request", msg=fail_message, ) + + # Test invalid methods + def test_weeklygoal_get_invalid_methods(self): + # Test not allowed get method + response = self.client.get( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test not allowed patch method + response = self.client.patch( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test not allowed put method + response = self.client.put( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) + + # Test not allowed delete method + response = self.client.delete( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Method not allowed!", + msg=fail_message, + ) diff --git a/home/tests/unit/api/test_appuser.py b/home/tests/unit/api/test_appuser.py index d79ffc34..78e55afe 100644 --- a/home/tests/unit/api/test_appuser.py +++ b/home/tests/unit/api/test_appuser.py @@ -55,8 +55,8 @@ def test_invalid_input(self): dict(zip="1234"), dict(age=0), dict(is_latino=True), - dict(gender="", gender_other="other gender"), - dict(sexual_orien="", sexual_orien_other="idk"), + dict(gender=None, gender_other="other gender"), + 
dict(sexual_orien=None, sexual_orien_other="idk"), dict(race=None, race_other="other race"), dict(gender="NB", gender_other="nonbinary"), ] diff --git a/home/tests/unit/api/test_leaderboard.py b/home/tests/unit/api/test_leaderboard.py index e56c97f1..3efdcf65 100644 --- a/home/tests/unit/api/test_leaderboard.py +++ b/home/tests/unit/api/test_leaderboard.py @@ -5,11 +5,7 @@ from freezegun import freeze_time from home.models import Contest, Device, Leaderboard -from home.utils.generators import ( - AccountGenerator, - DeviceGenerator, -) - +from home.utils.generators import AccountGenerator, DeviceGenerator logger = logging.getLogger(__name__) @@ -388,3 +384,140 @@ def test_get_leaderboard(self): response_data_pretest["payload"]["leaderboard"], ) self.assertEqual(response.status_code, 200) + + def test_get_leaderboard_missing_contest_id(self): + # generate active account + accounts = list(AccountGenerator().generate(2)) + # generate a contest + contest = Contest() + contest.start_baseline = "3000-01-01" + contest.start_promo = "3000-02-01" + contest.start = "3000-02-01" + contest.end = "3000-02-28" + contest.save() + # generate devices for the active accounts + device1 = list(DeviceGenerator(accounts[1:2]).generate(1)) + Leaderboard.objects.create( + device=device1[0], + contest=contest, + account=accounts[0], + steps=5, + ) + data = { + "device_id": device1[0].device_id, + } + query = urllib.parse.urlencode(data) + response = self.client.get("/api/leaderboard/get/?" + query) + self.assertEqual(response.status_code, 200) + response_message = response.content + fail_message = f"Server response - {response_message}" + self.assertEqual( + response_message, + b"No contest specified", + msg=fail_message, + ) + + def test_get_leaderboard_invalid_contest_id(self): + # generate active account + accounts = list(AccountGenerator().generate(2)) + # generate a contest + contest = Contest() + contest.start_baseline = "3000-01-01" + contest.start_promo = "3000-02-01" + contest.start = "3000-02-01" + contest.end = "3000-02-28" + contest.save() + # generate devices for the active accounts + device1 = list(DeviceGenerator(accounts[1:2]).generate(1)) + Leaderboard.objects.create( + device=device1[0], + contest=contest, + account=accounts[0], + steps=5, + ) + data = { + "contest_id": "invalid_contest_id", + "device_id": device1[0].device_id, + } + query = urllib.parse.urlencode(data) + response = self.client.get("/api/leaderboard/get/?" + query) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + "Contest not found", + msg=fail_message, + ) + + def test_get_leaderboard_missing_device_id(self): + # generate active account + accounts = list(AccountGenerator().generate(2)) + # generate a contest + contest = Contest() + contest.start_baseline = "3000-01-01" + contest.start_promo = "3000-02-01" + contest.start = "3000-02-01" + contest.end = "3000-02-28" + contest.save() + # generate devices for the active accounts + device1 = list(DeviceGenerator(accounts[1:2]).generate(1)) + Leaderboard.objects.create( + device=device1[0], + contest=contest, + account=accounts[0], + steps=5, + ) + data = {"contest_id": contest.contest_id} + query = urllib.parse.urlencode(data) + response = self.client.get("/api/leaderboard/get/?" 
+ query) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + f"Unregistered device - Please register first!device_id:{data.get('device_id')}", + msg=fail_message, + ) + + def test_get_leaderboard_invalid_device_id(self): + # generate active account + accounts = list(AccountGenerator().generate(2)) + # generate a contest + contest = Contest() + contest.start_baseline = "3000-01-01" + contest.start_promo = "3000-02-01" + contest.start = "3000-02-01" + contest.end = "3000-02-28" + contest.save() + # generate devices for the active accounts + device1 = list(DeviceGenerator(accounts[1:2]).generate(1)) + Leaderboard.objects.create( + device=device1[0], + contest=contest, + account=accounts[0], + steps=5, + ) + data = { + "contest_id": contest.contest_id, + "device_id": "invalid_device_id", + } + query = urllib.parse.urlencode(data) + response = self.client.get("/api/leaderboard/get/?" + query) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["status"], "error", msg=fail_message) + self.assertEqual( + response_data["message"], + f"Unregistered device - Please register first!device_id:{data['device_id']}", + msg=fail_message, + ) diff --git a/home/tests/unit/test_histogram.py b/home/tests/unit/test_histogram.py index b0856f4f..b07cc734 100644 --- a/home/tests/unit/test_histogram.py +++ b/home/tests/unit/test_histogram.py @@ -1,6 +1,8 @@ +from datetime import date, datetime, timezone from random import seed -from datetime import date, timezone, datetime + from django.test import TestCase + from home.models import Account, Contest from home.models.dailywalk import DailyWalk from home.models.intentionalwalk import IntentionalWalk @@ -144,6 +146,39 @@ def create_test_cases(self): "error": "greater than", }, }, + { + "name": "bin_custom values must be in increasing order", + "input": { + "field": "steps", + "bin_custom": "1,1,3,4,5", + "model": Leaderboard, + }, + "expect": { + "error": "values must be in increasing order", + }, + }, + { + "name": "bin_custom values must be positive", + "input": { + "field": "steps", + "bin_custom": "-1,2,3,4,5", + "model": Leaderboard, + }, + "expect": { + "error": "values must be positive", + }, + }, + { + "name": "bin_custom could not be parsed", + "input": { + "field": "steps", + "bin_custom": "1,2,3,4,5,a", + "model": Leaderboard, + }, + "expect": { + "error": "could not be parsed", + }, + }, { "name": "invalid parameter for `field`", "input": { diff --git a/home/views/api/admin.py b/home/views/api/admin.py index 7dd215dc..e0d5fa72 100644 --- a/home/views/api/admin.py +++ b/home/views/api/admin.py @@ -9,8 +9,6 @@ from django.db.models.functions import Concat, TruncDate from django.http import HttpRequest, HttpResponse, JsonResponse from django.views import View -from rest_framework import status -from rest_framework.response import Response from home.models import Account, Contest, DailyWalk from home.models.intentionalwalk import IntentionalWalk diff --git a/home/views/api/api.py b/home/views/api/api.py deleted file mode 100644 index e69de29b..00000000 diff --git a/home/views/api/export.py 
b/home/views/api/export.py index 446348f9..56e9b4ac 100644 --- a/home/views/api/export.py +++ b/home/views/api/export.py @@ -2,16 +2,9 @@ import logging import os import tempfile - from datetime import timedelta -from django.db.models import ( - BooleanField, - Count, - ExpressionWrapper, - Q, - Sum, -) +from django.db.models import BooleanField, Count, ExpressionWrapper, Q, Sum from django.http import FileResponse, HttpResponse from django.views import View From c01dd42807709e0822a339aa255a04feddf313b4 Mon Sep 17 00:00:00 2001 From: Patrick Yu Date: Mon, 19 Aug 2024 13:14:38 -0700 Subject: [PATCH 6/9] Added tests for APIv2. --- home/tests/__init__.py | 1 - home/tests/integration/__init__.py | 1 - .../{contest => apiv1}/__init__.py | 0 .../{views => apiv1/appuser}/__init__.py | 0 .../{ => apiv1}/appuser/test_create.py | 0 .../{ => apiv1}/appuser/test_delete.py | 0 .../{ => apiv1}/appuser/test_update.py | 0 .../{views/api => apiv1/contest}/__init__.py | 0 .../{ => apiv1}/contest/test_current.py | 0 .../integration/apiv1/dailywalk/__init__.py | 1 + .../{ => apiv1}/dailywalk/test_create.py | 0 .../{ => apiv1}/dailywalk/test_get.py | 0 .../{ => apiv1}/dailywalk/test_update.py | 0 .../integration/apiv1/histogram/__init__.py | 1 + .../{ => apiv1}/histogram/test_histogram.py | 6 +- .../web => apiv1/intentionalwalk}/__init__.py | 0 .../intentionalwalk/test_create.py | 0 .../{ => apiv1}/intentionalwalk/test_get.py | 0 .../apiv1/views}/__init__.py | 0 .../integration/apiv1/views/api/__init__.py | 1 + .../{ => apiv1}/views/api/test_admin.py | 0 .../{ => apiv1}/views/api/test_export.py | 0 .../{ => apiv1}/views/api/utils.py | 0 .../integration/apiv1/views/web/__init__.py | 0 .../{ => apiv1}/views/web/test_data.py | 0 .../integration/apiv1/weeklygoal/__init__.py | 1 + .../{ => apiv1}/weeklygoal/test_create.py | 0 .../{ => apiv1}/weeklygoal/test_get.py | 0 home/tests/integration/apiv2/__init__.py | 0 .../integration/apiv2/appuser/__init__.py | 1 + .../integration/apiv2/appuser/test_create.py | 276 ++++++++++ .../integration/apiv2/appuser/test_delete.py | 186 +++++++ .../integration/apiv2/appuser/test_update.py | 379 +++++++++++++ .../integration/apiv2/contest/__init__.py | 0 .../integration/apiv2/contest/test_current.py | 138 +++++ .../integration/apiv2/dailywalk/__init__.py | 0 .../apiv2/dailywalk/test_create.py | 338 ++++++++++++ .../integration/apiv2/dailywalk/test_get.py | 336 ++++++++++++ .../apiv2/dailywalk/test_update.py | 77 +++ .../integration/apiv2/device/__init__.py | 0 .../integration/apiv2/device/test_delete.py | 186 +++++++ .../integration/apiv2/device/test_update.py | 287 ++++++++++ .../integration/apiv2/histogram/__init__.py | 0 .../apiv2/histogram/test_histogram.py | 217 ++++++++ .../apiv2/intentionalwalk/__init__.py | 0 .../apiv2/intentionalwalk/test_create.py | 263 +++++++++ .../apiv2/intentionalwalk/test_get.py | 406 ++++++++++++++ .../tests/integration/apiv2/views/__init__.py | 1 + .../integration/apiv2/views/api/__init__.py | 0 .../integration/apiv2/views/api/test_admin.py | 400 ++++++++++++++ .../apiv2/views/api/test_export.py | 92 ++++ .../integration/apiv2/views/api/utils.py | 126 +++++ .../integration/apiv2/views/web/__init__.py | 1 + .../integration/apiv2/views/web/test_data.py | 260 +++++++++ .../integration/apiv2/weeklygoal/__init__.py | 0 .../apiv2/weeklygoal/test_create.py | 264 +++++++++ .../integration/apiv2/weeklygoal/test_get.py | 198 +++++++ home/tests/integration/appuser/__init__.py | 1 - home/tests/integration/dailywalk/__init__.py | 1 - 
.../integration/intentionalwalk/__init__.py | 1 - home/tests/integration/weeklygoal/__init__.py | 1 - home/tests/unit/apiv1/__init__.py | 0 home/tests/unit/apiv1/api/__init__.py | 1 + .../unit/{ => apiv1}/api/test_appuser.py | 0 .../unit/{ => apiv1}/api/test_leaderboard.py | 0 home/tests/unit/{ => apiv1}/test_contest.py | 0 home/tests/unit/{ => apiv1}/test_dates.py | 0 home/tests/unit/{ => apiv1}/test_histogram.py | 2 +- home/tests/unit/{ => apiv1}/test_user.py | 0 home/tests/unit/{ => apiv1}/test_utils.py | 12 +- home/tests/unit/apiv2/__init__.py | 0 home/tests/unit/apiv2/api/__init__.py | 1 + home/tests/unit/apiv2/api/test_appuser.py | 66 +++ home/tests/unit/apiv2/api/test_leaderboard.py | 508 ++++++++++++++++++ home/tests/unit/apiv2/test_contest.py | 251 +++++++++ home/tests/unit/apiv2/test_dates.py | 20 + home/tests/unit/apiv2/test_histogram.py | 459 ++++++++++++++++ home/tests/unit/apiv2/test_user.py | 201 +++++++ home/tests/unit/apiv2/test_utils.py | 39 ++ home/views/apiv2/admin.py | 45 +- home/views/apiv2/appuser.py | 16 +- home/views/apiv2/contest.py | 4 +- home/views/apiv2/dailywalk.py | 3 +- home/views/apiv2/device.py | 18 +- home/views/apiv2/export.py | 3 +- home/views/apiv2/leaderboard.py | 14 +- home/views/apiv2/schemas/account.py | 6 - home/views/apiv2/schemas/admin.py | 35 +- home/views/apiv2/schemas/device.py | 54 -- home/views/apiv2/schemas/weeklygoal.py | 8 - home/views/apiv2/weeklygoal.py | 14 +- 91 files changed, 6075 insertions(+), 153 deletions(-) rename home/tests/integration/{contest => apiv1}/__init__.py (100%) rename home/tests/integration/{views => apiv1/appuser}/__init__.py (100%) rename home/tests/integration/{ => apiv1}/appuser/test_create.py (100%) rename home/tests/integration/{ => apiv1}/appuser/test_delete.py (100%) rename home/tests/integration/{ => apiv1}/appuser/test_update.py (100%) rename home/tests/integration/{views/api => apiv1/contest}/__init__.py (100%) rename home/tests/integration/{ => apiv1}/contest/test_current.py (100%) create mode 100644 home/tests/integration/apiv1/dailywalk/__init__.py rename home/tests/integration/{ => apiv1}/dailywalk/test_create.py (100%) rename home/tests/integration/{ => apiv1}/dailywalk/test_get.py (100%) rename home/tests/integration/{ => apiv1}/dailywalk/test_update.py (100%) create mode 100644 home/tests/integration/apiv1/histogram/__init__.py rename home/tests/integration/{ => apiv1}/histogram/test_histogram.py (98%) rename home/tests/integration/{views/web => apiv1/intentionalwalk}/__init__.py (100%) rename home/tests/integration/{ => apiv1}/intentionalwalk/test_create.py (100%) rename home/tests/integration/{ => apiv1}/intentionalwalk/test_get.py (100%) rename home/tests/{unit/api => integration/apiv1/views}/__init__.py (100%) create mode 100644 home/tests/integration/apiv1/views/api/__init__.py rename home/tests/integration/{ => apiv1}/views/api/test_admin.py (100%) rename home/tests/integration/{ => apiv1}/views/api/test_export.py (100%) rename home/tests/integration/{ => apiv1}/views/api/utils.py (100%) create mode 100644 home/tests/integration/apiv1/views/web/__init__.py rename home/tests/integration/{ => apiv1}/views/web/test_data.py (100%) create mode 100644 home/tests/integration/apiv1/weeklygoal/__init__.py rename home/tests/integration/{ => apiv1}/weeklygoal/test_create.py (100%) rename home/tests/integration/{ => apiv1}/weeklygoal/test_get.py (100%) create mode 100644 home/tests/integration/apiv2/__init__.py create mode 100644 home/tests/integration/apiv2/appuser/__init__.py create mode 100644 
home/tests/integration/apiv2/appuser/test_create.py create mode 100644 home/tests/integration/apiv2/appuser/test_delete.py create mode 100644 home/tests/integration/apiv2/appuser/test_update.py create mode 100644 home/tests/integration/apiv2/contest/__init__.py create mode 100644 home/tests/integration/apiv2/contest/test_current.py create mode 100644 home/tests/integration/apiv2/dailywalk/__init__.py create mode 100644 home/tests/integration/apiv2/dailywalk/test_create.py create mode 100644 home/tests/integration/apiv2/dailywalk/test_get.py create mode 100644 home/tests/integration/apiv2/dailywalk/test_update.py create mode 100644 home/tests/integration/apiv2/device/__init__.py create mode 100644 home/tests/integration/apiv2/device/test_delete.py create mode 100644 home/tests/integration/apiv2/device/test_update.py create mode 100644 home/tests/integration/apiv2/histogram/__init__.py create mode 100644 home/tests/integration/apiv2/histogram/test_histogram.py create mode 100644 home/tests/integration/apiv2/intentionalwalk/__init__.py create mode 100644 home/tests/integration/apiv2/intentionalwalk/test_create.py create mode 100644 home/tests/integration/apiv2/intentionalwalk/test_get.py create mode 100644 home/tests/integration/apiv2/views/__init__.py create mode 100644 home/tests/integration/apiv2/views/api/__init__.py create mode 100644 home/tests/integration/apiv2/views/api/test_admin.py create mode 100644 home/tests/integration/apiv2/views/api/test_export.py create mode 100644 home/tests/integration/apiv2/views/api/utils.py create mode 100644 home/tests/integration/apiv2/views/web/__init__.py create mode 100644 home/tests/integration/apiv2/views/web/test_data.py create mode 100644 home/tests/integration/apiv2/weeklygoal/__init__.py create mode 100644 home/tests/integration/apiv2/weeklygoal/test_create.py create mode 100644 home/tests/integration/apiv2/weeklygoal/test_get.py delete mode 100644 home/tests/integration/appuser/__init__.py delete mode 100644 home/tests/integration/dailywalk/__init__.py delete mode 100644 home/tests/integration/intentionalwalk/__init__.py delete mode 100644 home/tests/integration/weeklygoal/__init__.py create mode 100644 home/tests/unit/apiv1/__init__.py create mode 100644 home/tests/unit/apiv1/api/__init__.py rename home/tests/unit/{ => apiv1}/api/test_appuser.py (100%) rename home/tests/unit/{ => apiv1}/api/test_leaderboard.py (100%) rename home/tests/unit/{ => apiv1}/test_contest.py (100%) rename home/tests/unit/{ => apiv1}/test_dates.py (100%) rename home/tests/unit/{ => apiv1}/test_histogram.py (99%) rename home/tests/unit/{ => apiv1}/test_user.py (100%) rename home/tests/unit/{ => apiv1}/test_utils.py (86%) create mode 100644 home/tests/unit/apiv2/__init__.py create mode 100644 home/tests/unit/apiv2/api/__init__.py create mode 100644 home/tests/unit/apiv2/api/test_appuser.py create mode 100644 home/tests/unit/apiv2/api/test_leaderboard.py create mode 100644 home/tests/unit/apiv2/test_contest.py create mode 100644 home/tests/unit/apiv2/test_dates.py create mode 100644 home/tests/unit/apiv2/test_histogram.py create mode 100644 home/tests/unit/apiv2/test_user.py create mode 100644 home/tests/unit/apiv2/test_utils.py diff --git a/home/tests/__init__.py b/home/tests/__init__.py index 917000b9..e69de29b 100644 --- a/home/tests/__init__.py +++ b/home/tests/__init__.py @@ -1 +0,0 @@ -# home.tests diff --git a/home/tests/integration/__init__.py b/home/tests/integration/__init__.py index 34016c9e..e69de29b 100644 --- a/home/tests/integration/__init__.py +++ 
b/home/tests/integration/__init__.py @@ -1 +0,0 @@ -# home.tests.integration diff --git a/home/tests/integration/contest/__init__.py b/home/tests/integration/apiv1/__init__.py similarity index 100% rename from home/tests/integration/contest/__init__.py rename to home/tests/integration/apiv1/__init__.py diff --git a/home/tests/integration/views/__init__.py b/home/tests/integration/apiv1/appuser/__init__.py similarity index 100% rename from home/tests/integration/views/__init__.py rename to home/tests/integration/apiv1/appuser/__init__.py diff --git a/home/tests/integration/appuser/test_create.py b/home/tests/integration/apiv1/appuser/test_create.py similarity index 100% rename from home/tests/integration/appuser/test_create.py rename to home/tests/integration/apiv1/appuser/test_create.py diff --git a/home/tests/integration/appuser/test_delete.py b/home/tests/integration/apiv1/appuser/test_delete.py similarity index 100% rename from home/tests/integration/appuser/test_delete.py rename to home/tests/integration/apiv1/appuser/test_delete.py diff --git a/home/tests/integration/appuser/test_update.py b/home/tests/integration/apiv1/appuser/test_update.py similarity index 100% rename from home/tests/integration/appuser/test_update.py rename to home/tests/integration/apiv1/appuser/test_update.py diff --git a/home/tests/integration/views/api/__init__.py b/home/tests/integration/apiv1/contest/__init__.py similarity index 100% rename from home/tests/integration/views/api/__init__.py rename to home/tests/integration/apiv1/contest/__init__.py diff --git a/home/tests/integration/contest/test_current.py b/home/tests/integration/apiv1/contest/test_current.py similarity index 100% rename from home/tests/integration/contest/test_current.py rename to home/tests/integration/apiv1/contest/test_current.py diff --git a/home/tests/integration/apiv1/dailywalk/__init__.py b/home/tests/integration/apiv1/dailywalk/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/home/tests/integration/apiv1/dailywalk/__init__.py @@ -0,0 +1 @@ + diff --git a/home/tests/integration/dailywalk/test_create.py b/home/tests/integration/apiv1/dailywalk/test_create.py similarity index 100% rename from home/tests/integration/dailywalk/test_create.py rename to home/tests/integration/apiv1/dailywalk/test_create.py diff --git a/home/tests/integration/dailywalk/test_get.py b/home/tests/integration/apiv1/dailywalk/test_get.py similarity index 100% rename from home/tests/integration/dailywalk/test_get.py rename to home/tests/integration/apiv1/dailywalk/test_get.py diff --git a/home/tests/integration/dailywalk/test_update.py b/home/tests/integration/apiv1/dailywalk/test_update.py similarity index 100% rename from home/tests/integration/dailywalk/test_update.py rename to home/tests/integration/apiv1/dailywalk/test_update.py diff --git a/home/tests/integration/apiv1/histogram/__init__.py b/home/tests/integration/apiv1/histogram/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/home/tests/integration/apiv1/histogram/__init__.py @@ -0,0 +1 @@ + diff --git a/home/tests/integration/histogram/test_histogram.py b/home/tests/integration/apiv1/histogram/test_histogram.py similarity index 98% rename from home/tests/integration/histogram/test_histogram.py rename to home/tests/integration/apiv1/histogram/test_histogram.py index 89d17e68..150887d1 100644 --- a/home/tests/integration/histogram/test_histogram.py +++ b/home/tests/integration/apiv1/histogram/test_histogram.py @@ -1,7 +1,11 @@ from random import 
seed + from django.test import Client, TestCase -from home.tests.integration.views.api.utils import Login, generate_test_data +from home.tests.integration.apiv1.views.api.utils import ( + Login, + generate_test_data, +) class ApiTestCase(TestCase): diff --git a/home/tests/integration/views/web/__init__.py b/home/tests/integration/apiv1/intentionalwalk/__init__.py similarity index 100% rename from home/tests/integration/views/web/__init__.py rename to home/tests/integration/apiv1/intentionalwalk/__init__.py diff --git a/home/tests/integration/intentionalwalk/test_create.py b/home/tests/integration/apiv1/intentionalwalk/test_create.py similarity index 100% rename from home/tests/integration/intentionalwalk/test_create.py rename to home/tests/integration/apiv1/intentionalwalk/test_create.py diff --git a/home/tests/integration/intentionalwalk/test_get.py b/home/tests/integration/apiv1/intentionalwalk/test_get.py similarity index 100% rename from home/tests/integration/intentionalwalk/test_get.py rename to home/tests/integration/apiv1/intentionalwalk/test_get.py diff --git a/home/tests/unit/api/__init__.py b/home/tests/integration/apiv1/views/__init__.py similarity index 100% rename from home/tests/unit/api/__init__.py rename to home/tests/integration/apiv1/views/__init__.py diff --git a/home/tests/integration/apiv1/views/api/__init__.py b/home/tests/integration/apiv1/views/api/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/home/tests/integration/apiv1/views/api/__init__.py @@ -0,0 +1 @@ + diff --git a/home/tests/integration/views/api/test_admin.py b/home/tests/integration/apiv1/views/api/test_admin.py similarity index 100% rename from home/tests/integration/views/api/test_admin.py rename to home/tests/integration/apiv1/views/api/test_admin.py diff --git a/home/tests/integration/views/api/test_export.py b/home/tests/integration/apiv1/views/api/test_export.py similarity index 100% rename from home/tests/integration/views/api/test_export.py rename to home/tests/integration/apiv1/views/api/test_export.py diff --git a/home/tests/integration/views/api/utils.py b/home/tests/integration/apiv1/views/api/utils.py similarity index 100% rename from home/tests/integration/views/api/utils.py rename to home/tests/integration/apiv1/views/api/utils.py diff --git a/home/tests/integration/apiv1/views/web/__init__.py b/home/tests/integration/apiv1/views/web/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/home/tests/integration/views/web/test_data.py b/home/tests/integration/apiv1/views/web/test_data.py similarity index 100% rename from home/tests/integration/views/web/test_data.py rename to home/tests/integration/apiv1/views/web/test_data.py diff --git a/home/tests/integration/apiv1/weeklygoal/__init__.py b/home/tests/integration/apiv1/weeklygoal/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/home/tests/integration/apiv1/weeklygoal/__init__.py @@ -0,0 +1 @@ + diff --git a/home/tests/integration/weeklygoal/test_create.py b/home/tests/integration/apiv1/weeklygoal/test_create.py similarity index 100% rename from home/tests/integration/weeklygoal/test_create.py rename to home/tests/integration/apiv1/weeklygoal/test_create.py diff --git a/home/tests/integration/weeklygoal/test_get.py b/home/tests/integration/apiv1/weeklygoal/test_get.py similarity index 100% rename from home/tests/integration/weeklygoal/test_get.py rename to home/tests/integration/apiv1/weeklygoal/test_get.py diff --git a/home/tests/integration/apiv2/__init__.py 
b/home/tests/integration/apiv2/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/home/tests/integration/apiv2/appuser/__init__.py b/home/tests/integration/apiv2/appuser/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/home/tests/integration/apiv2/appuser/__init__.py @@ -0,0 +1 @@ + diff --git a/home/tests/integration/apiv2/appuser/test_create.py b/home/tests/integration/apiv2/appuser/test_create.py new file mode 100644 index 00000000..b0e5df06 --- /dev/null +++ b/home/tests/integration/apiv2/appuser/test_create.py @@ -0,0 +1,276 @@ +from django.test import Client, TestCase + +from home.models import Account + + +class ApiTestCase(TestCase): + def setUp(self): + # Test client + self.client = Client() + # Url for creation + self.url = "/api/v2/appuser" + # Request parameters + self.request_params = { + "name": "John Doe", + "email": "john@blah.com", + "zip": "94105", + "age": 99, + "account_id": "12345", + } + self.expected_response = { + "name": "John Doe", + "email": "john@blah.com", + "zip": "94105", + "age": 99, + "is_tester": False, + "is_sf_resident": True, + } + # Content type + self.content_type = "application/json" + + # Test creation of a new app user + def test_create_appuser_success(self): + # Register the user + response = self.client.post( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + # Check object was created and matches expected values + user_obj = Account.objects.get(email=self.request_params["email"]) + + for field, expected_value in self.expected_response.items(): + self.assertEqual( + getattr(user_obj, field), expected_value, msg=f"{field}" + ) + + # Test creation of a new "Tester" app user + def test_create_tester_appuser_success(self): + # Set up request and response for a Tester user based in SF + request_params = self.request_params.copy() + request_params.update( + { + "name": "Tester John", + } + ) + expected_response = self.expected_response.copy() + expected_response.update( + { + "name": "Tester John", + "is_tester": True, + } + ) + + # Register the user + response = self.client.post( + path=self.url, data=request_params, content_type=self.content_type + ) + + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + # Check object was created and matches expected values + user_obj = Account.objects.get(email=request_params["email"]) + + for field, expected_value in expected_response.items(): + self.assertEqual( + getattr(user_obj, field), expected_value, msg=f"{field}" + ) + + # Test creation of a duplicate user + # This should default to an update + def test_create_appuser_duplicate(self): + # Register the user first + response = self.client.post( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # Check object was created and matches expected values + user_obj = Account.objects.get(email=self.request_params["email"]) + for field in ["name", "email", "zip", "age"]: + self.assertEqual( + getattr(user_obj, field), + self.request_params[field], + msg=f"{field}", + ) + + # Create the same user again + # Create the same user but with different case email + # This should NOT create a new user record + request_params = {**self.request_params, "email": "John@blah.com"} + + # This should default to an UPDATE + 
response = self.client.post( + path=self.url, + data=request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # Check object was updated and matches expected values + dupe_user_obj = Account.objects.get( + email__iexact=request_params["email"] + ) + for field in ["name", "zip", "age"]: + self.assertEqual( + getattr(dupe_user_obj, field), + request_params[field], + msg=f"{field}", + ) + self.assertEqual(user_obj.id, dupe_user_obj.id) + + # Test creation of the same user using a different device + # This should NOT create a new account; the existing account is matched by email and reused for the new device + def test_create_appuser_new_device(self): + + # Register the user first + response = self.client.post( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + user_obj = Account.objects.get(email=self.request_params["email"]) + for field in ["name", "email", "zip", "age"]: + self.assertEqual( + getattr(user_obj, field), + self.request_params[field], + msg=f"{field}", + ) + + # Create the same user but with a different account id + # This should NOT create a new user + request_params = { + **self.request_params, + "email": "John@blah.com", + "account_id": "54321", + } + + # Register the user again from the new device + response = self.client.post( + path=self.url, data=request_params, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # Check object was updated and matches expected values + dupe_user_obj = Account.objects.get( + email__iexact=request_params["email"] + ) + for field in ["name", "zip", "age"]: + self.assertEqual( + getattr(dupe_user_obj, field), + request_params[field], + ) + self.assertEqual(dupe_user_obj.id, user_obj.id) + + # Test failure when creating a new app user with missing information + def test_create_appuser_failure_missing_field_age(self): + # Required fields for user creation + # Remove the age field + request_params = self.request_params.copy() + del request_params["age"] + + # Register the user + response = self.client.post( + path=self.url, data=request_params, content_type=self.content_type + ) + # Check for a response by the server + self.assertEqual(response.status_code, 422) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"][0]["type"], "missing", msg=fail_message + ) + self.assertIn( + "age", response_data["detail"][0]["loc"], msg=fail_message + ) + self.assertEqual( + response_data["detail"][0]["msg"], + "Field required", + msg=fail_message, + ) + + # Test failure when creating a new app user with missing information + def test_create_appuser_failure_missing_field_device_id(self): + # Required fields for user creation + # Remove the account_id field + request_params = self.request_params.copy() + del request_params["account_id"] + + # Register the user + response = self.client.post( + path=self.url, data=request_params, content_type=self.content_type + ) + # Check for a response by the server + self.assertEqual(response.status_code, 422) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"][0]["type"], "missing", msg=fail_message + ) + self.assertIn( + "account_id", response_data["detail"][0]["loc"], msg=fail_message + ) + 
self.assertEqual( + response_data["detail"][0]["msg"], + "Field required", + msg=fail_message, + ) + + # Test invalid method + def test_create_appuser_invalid_method(self): + # Required fields for user creation + request_params = self.request_params.copy() + + # Test not allowed get method + response = self.client.get( + path=self.url, data=request_params, content_type=self.content_type + ) + # Check for a response by the server + self.assertEqual(response.status_code, 405) + # Parse the response + response_data = response.content + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data, b"Method not allowed", msg=fail_message + ) + + # Test not allowed patch method + response = self.client.patch( + path=self.url, data=request_params, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 405) + # Parse the response + response_data = response.content + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data, b"Method not allowed", msg=fail_message + ) + + # Test not allowed delete method + response = self.client.delete( + path=self.url, + data=request_params["account_id"], + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 405) + # Parse the response + response_data = response.content + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data, b"Method not allowed", msg=fail_message + ) diff --git a/home/tests/integration/apiv2/appuser/test_delete.py b/home/tests/integration/apiv2/appuser/test_delete.py new file mode 100644 index 00000000..20fcc853 --- /dev/null +++ b/home/tests/integration/apiv2/appuser/test_delete.py @@ -0,0 +1,186 @@ +from django.test import Client, TestCase + +from home.models import Account, Device + + +class ApiTestCase(TestCase): + def setUp(self): + # Test client + self.client = Client() + + # Urls for creation and deletion + self.appuser_url = "/api/v2/appuser" + self.device_url = "/api/v2/device" + self.content_type = "application/json" + + # Constants + self.account_id1 = "12345" + self.account_id2 = "23456" + self.email1 = "john@blah.com" + self.email2 = "joe@blah.com" + + # Request parameters + base_params = { + "name": "John Doe", + "zip": "94132", + "age": 99, + } + self.request_params_user1 = base_params.copy() + self.request_params_user2 = base_params.copy() + self.request_params_user1.update( + { + "name": "John Doe", + "email": self.email1, + "account_id": self.account_id1, + } + ) + self.request_params_user2.update( + { + "name": "Joe Doe", + "email": self.email2, + "account_id": self.account_id2, + } + ) + + # Register the users + self.create_user_and_device_confirm_response(self.request_params_user1) + self.create_user_and_device_confirm_response(self.request_params_user2) + + self.existing_user_accts = [self.account_id1, self.account_id2] + + def tearDown(self) -> None: + for acct_id in self.existing_user_accts: + response = self.client.delete( + path=f"{self.appuser_url}/{acct_id}", + # data={"account_id": acct_id}, + content_type=self.content_type, + ) + + self.check_delete_success(response, acct_id) + + def create_user_and_device_confirm_response(self, request_params): + # Create the user + request_params = request_params.copy() + device_id = request_params["account_id"] + response = self.client.post( + path=self.appuser_url, + data=request_params, + content_type=self.content_type, + ) + + # Check for a 
successful response by the server + self.assertEqual(response.status_code, 201) + + # Check object was created and matches expected values + user_obj = Account.objects.get(email=request_params["email"]) + del request_params["account_id"] + for field, expected_value in request_params.items(): + self.assertEqual( + getattr(user_obj, field), expected_value, msg=f"{field}" + ) + + # Required request params for updating device info + request_params = { + "device_model": "iPhone16,1", + "manufacturer": "Apple", + "os_name": "iOS", + "os_version": "17.5", + "device_id": device_id, + } + # Update the user's device for the first time + response = self.client.put( + path=f"{self.device_url}/{device_id}", + data=request_params, + content_type=self.content_type, + ) + + # Check for a successful response by the server + self.assertEqual(response.status_code, 204) + + # Check for a successful update in the database + device_obj = Device.objects.get(device_id=device_id) + for field, value in request_params.items(): + self.assertEqual( + getattr(device_obj, field), + value, + msg=f"field: {field} - Expected: {value} \ + Got: {getattr(device_obj, field)}", + ) + + def check_delete_success(self, response, deleted_account_id): + # Check for a successful delete response by the server + self.assertEqual(response.status_code, 204, msg=response) + + # Check user/device no longer exists by trying & failing to + # update the nonexistent user + patch_response = self.client.patch( + path=f"{self.appuser_url}/{deleted_account_id}", + data={"account_id": deleted_account_id}, + content_type=self.content_type, + ) + self.assertEqual(patch_response.status_code, 404, msg=patch_response) + + patch_response = self.client.patch( + path=f"{self.device_url}/{deleted_account_id}", + data={"device_id": deleted_account_id}, + content_type=self.content_type, + ) + self.assertEqual(patch_response.status_code, 404, msg=patch_response) + + def check_users_and_device_still_exist(self, valid_account_ids=[]): + # Check other users still exist + for acct_id in valid_account_ids: + # Update user + expected_success_update_response = self.client.patch( + path=f"{self.appuser_url}/{acct_id}", + data={"account_id": acct_id}, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(expected_success_update_response.status_code, 204) + + # Update device + expected_success_update_response = self.client.patch( + path=f"{self.device_url}/{acct_id}", + data={"device_id": acct_id}, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(expected_success_update_response.status_code, 204) + + def test_delete_user_success(self): + # Delete the first user + response = self.client.delete( + path=f"{self.appuser_url}/{self.account_id1}", + content_type=self.content_type, + ) + + self.check_delete_success(response, self.account_id1) + self.check_users_and_device_still_exist([self.account_id2]) + + self.existing_user_accts = [self.account_id2] + + def check_delete_failure(self, response, expected_msg): + # Check for a failed delete response by the server + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"], expected_msg, msg=fail_message + ) + + def test_delete_user_failure_nonexistent(self): + # Delete nonexistent user + response = self.client.delete( + path=f"{self.appuser_url}/fakeID", + 
content_type=self.content_type, + ) + + self.check_delete_failure( + response, + expected_msg=f"Cannot find device registered with account_id: fakeID", + ) + self.check_users_and_device_still_exist( + [self.account_id1, self.account_id2] + ) diff --git a/home/tests/integration/apiv2/appuser/test_update.py b/home/tests/integration/apiv2/appuser/test_update.py new file mode 100644 index 00000000..5afb13ec --- /dev/null +++ b/home/tests/integration/apiv2/appuser/test_update.py @@ -0,0 +1,379 @@ +from django.test import Client, TestCase + +from home.models import Account + + +class ApiTestCase(TestCase): + def setUp(self): + # Test client + self.client = Client() + # Url for creation + self.url = "/api/v2/appuser" + # Constants + self.account_id = "12345" + self.email = "john@blah.com" + # Request parameters + self.request_params = { + "name": "John Doe", + "email": self.email, + "zip": "94132", + "age": 99, + "account_id": self.account_id, + } + # Content type + self.content_type = "application/json" + + # Register the user + response = self.client.post( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # Check object was created and matches expected values + user_obj = Account.objects.get(email=self.request_params["email"]) + + for field in ["name", "email", "zip", "age"]: + self.assertEqual( + getattr(user_obj, field), + self.request_params[field], + msg=f"field: {field} - Expected: {self.request_params[field]}\ + Got: {getattr(user_obj, field)}", + ) + + # Test updating a User's demographics + # First user screen (name, email, age) registers a user + # Additional demographics update with PUT. Test applies to these changes. 
+ def test_update_appuser_demographics(self): + + # UPDATE USER DEMOGRAPHICS + request_params = { + **self.request_params, + "is_latino": "YE", + } + + # Register the user + response = self.client.put( + path=f"{self.url}/{self.account_id}", + data=request_params, + content_type=self.content_type, + ) + + # Check for a successful response by the server + self.assertEqual(response.status_code, 204) + + # Check for a successful update in the database + user_obj = Account.objects.get(email=self.email) + + field = "is_latino" + self.assertEqual( + getattr(user_obj, field), + request_params[field], + msg=f"field: {field} - Expected: {request_params[field]} \ + Got: {getattr(user_obj, field)}", + ) + + request_params.update( + { + "gender": "TM", + "gender_other": None, + "race": ["BL", "OT"], + "race_other": "Some other race", + "sexual_orien": "DA", + "sexual_orien_other": None, + } + ) + + response = self.client.put( + path=f"{self.url}/{self.account_id}", + data=request_params, + content_type=self.content_type, + ) + self.assertEqual(response.status_code, 204) + user_obj = Account.objects.get(email=self.email) + + for field in [ + "gender", + "gender_other", + "race_other", + "sexual_orien", + "sexual_orien_other", + ]: + self.assertEqual( + getattr(user_obj, field), + request_params[field], + msg=f"field: {field} - Expected: {request_params[field]} \ + Got: {getattr(user_obj, field)}", + ) + self.assertSetEqual( + set(user_obj.race), + set(request_params["race"]), + msg=f"Expected: {request_params['race']} \ + Got: {set(user_obj.race)}", + ) + + # Test updating a User's age + # This would hit the same creation URL + def test_update_appuser_age(self): + # UPDATE THE USERS AGE + request_params = {**self.request_params, "age": 88} + + # Send the update + response = self.client.post( + path=self.url, data=request_params, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # Parse the response + response_data = response.json() + + self.assertEqual( + response_data["account_id"], + request_params["account_id"], + msg=f"Expecting: {request_params['account_id']} \ + Got: {response_data['account_id']}", + ) + user_obj = Account.objects.get(email=self.email) + for field in ["name", "email", "zip", "age"]: + self.assertEqual( + getattr(user_obj, field), + request_params[field], + msg=f"Expecting: {request_params[field]} \ + Got: {getattr(user_obj, field)}", + ) + + # Test updating a User's name + # This would hit the same creation URL + def test_update_appuser_name(self): + # UPDATE THE USERS NAME + request_params = {**self.request_params, "name": "Abhay"} + + # Send the update + response = self.client.post( + path=self.url, data=request_params, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + # Parse the response + response_data = response.json() + + self.assertEqual( + response_data["account_id"], + request_params["account_id"], + msg=f"Expecting: {request_params['account_id']} \ + Got: {response_data['account_id']}", + ) + user_obj = Account.objects.get(email=self.email) + for field in ["name", "email", "zip", "age"]: + self.assertEqual( + getattr(user_obj, field), + request_params[field], + msg=f"Expecting: {request_params[field]} \ + Got: {getattr(user_obj, field)}", + ) + + # Test updating a User's email + # This shouldn't update + def test_update_appuser_email(self): + # UPDATE THE USERS EMAIL + request_params = { + 
**self.request_params, + "email": "abhaykashyap@blah.com", + } + + # Request to update user's email using post method + response = self.client.post( + path=self.url, data=request_params, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 400) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + + self.assertEqual( + response_data["detail"], + "Email cannot be updated. Contact admin", + msg=fail_message, + ) + + # Request to update user's email using patch method + response = self.client.patch( + path=f"{self.url}/{self.account_id}", + data=request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 400) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + + self.assertEqual( + response_data["detail"], + "Email cannot be updated. Contact admin", + msg=fail_message, + ) + + # Request to update user's email using put method + response = self.client.put( + path=f"{self.url}/{self.account_id}", + data=request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 400) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + + self.assertEqual( + response_data["detail"], + "Email cannot be updated. Contact admin", + msg=fail_message, + ) + + # Test updating User's age without providing account_id + # This should return an error response with a fail message + def test_update_appuser_missing_account_id(self): + # Required fields for user creation + request_params = {**self.request_params, "age": 88} + + # Test with account_id missing from path parameter + response = self.client.put( + path=self.url, data=request_params, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 405) + # Parse the response + response_data = response.content + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data, b"Method not allowed", msg=fail_message + ) + + # Remove the account_id field + del request_params["account_id"] + + # Test with account_id missing from payload + response = self.client.put( + path=f"{self.url}/{self.account_id}", + data=request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 422) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"][0]["type"], "missing", msg=fail_message + ) + self.assertIn( + "account_id", response_data["detail"][0]["loc"], msg=fail_message + ) + self.assertEqual( + response_data["detail"][0]["msg"], + "Field required", + msg=fail_message, + ) + + def test_update_appuser_failure_nonexistent(self): + request_params = {**self.request_params, "account_id": "fakeID"} + + # Update nonexistent user using patch method + response = self.client.patch( + path=f"{self.url}/fakeID", + data=request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + + self.assertEqual( + 
response_data["detail"], + "Cannot find device registered with account_id: fakeID", + msg=fail_message, + ) + + # Update nonexistent user using put method + response = self.client.put( + path=f"{self.url}/fakeID", + data=request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + + self.assertEqual( + response_data["detail"], + "Cannot find device registered with account_id: fakeID", + msg=fail_message, + ) + + # Test invalid methods + def test_update_appuser_invalid_methods(self): + # Required fields for user creation + request_params = {**self.request_params, "age": 88} + + # Test get method + response = self.client.get( + path=self.url, data=request_params, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 405) + # Parse the response + response_data = response.content + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data, b"Method not allowed", msg=fail_message + ) + + # Test patch method + response = self.client.patch( + path=self.url, data=request_params, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 405) + # Parse the response + response_data = response.content + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data, b"Method not allowed", msg=fail_message + ) + + # Test put method + response = self.client.put( + path=self.url, data=request_params, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 405) + # Parse the response + response_data = response.content + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data, b"Method not allowed", msg=fail_message + ) + + # Test delete method + response = self.client.delete( + path=self.url, data=request_params, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 405) + # Parse the response + response_data = response.content + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data, b"Method not allowed", msg=fail_message + ) diff --git a/home/tests/integration/apiv2/contest/__init__.py b/home/tests/integration/apiv2/contest/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/home/tests/integration/apiv2/contest/test_current.py b/home/tests/integration/apiv2/contest/test_current.py new file mode 100644 index 00000000..9d2bde27 --- /dev/null +++ b/home/tests/integration/apiv2/contest/test_current.py @@ -0,0 +1,138 @@ +from django.test import Client, TestCase +from freezegun import freeze_time + +from home.models import Contest + + +class ApiTestCase(TestCase): + def setUp(self): + # Test client + self.client = Client() + # Details for the current contest view + self.url = "/api/v2/contest/current" + # Content type + self.content_type = "application/json" + + def test_contest_current_error(self): + # Send the request + response = self.client.get( + path=self.url, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.json() + fail_message = f"Server response - 
{response_data}" + + self.assertEqual( + response_data["detail"], + "There are no contests", + msg=fail_message, + ) + + def test_contest_current(self): + # create a few contests + contest1 = Contest() + contest1.start_baseline = "2020-04-01" + contest1.start_promo = "2020-04-24" + contest1.start = "2020-05-01" + contest1.end = "2020-05-31" + contest1.save() + + contest2 = Contest() + contest2.start_baseline = "2020-06-01" + contest2.start_promo = "2020-06-21" + contest2.start = "2020-07-01" + contest2.end = "2020-07-31" + contest2.save() + + # before first promo starts, failure + with freeze_time("2020-04-01"): + response = self.client.get( + path=self.url, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"], + "There are no contests", + msg=fail_message, + ) + + # after promo starts for first contest + with freeze_time("2020-04-28"): + response = self.client.get( + path=self.url, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["contest_id"], str(contest1.pk)) + self.assertEqual(response_data["start_promo"], "2020-04-24") + self.assertEqual(response_data["start"], "2020-05-01") + self.assertEqual(response_data["end"], "2020-05-31") + + # during first contest + with freeze_time("2020-05-15"): + response = self.client.get( + path=self.url, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["contest_id"], str(contest1.pk)) + self.assertEqual(response_data["start_promo"], "2020-04-24") + self.assertEqual(response_data["start"], "2020-05-01") + self.assertEqual(response_data["end"], "2020-05-31") + + # after first contest, before promo starts for next + with freeze_time("2020-06-14"): + response = self.client.get( + path=self.url, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["contest_id"], str(contest1.pk)) + self.assertEqual(response_data["start_promo"], "2020-04-24") + self.assertEqual(response_data["start"], "2020-05-01") + self.assertEqual(response_data["end"], "2020-05-31") + + # after promo starts for next + with freeze_time("2020-06-28"): + response = self.client.get( + path=self.url, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["contest_id"], str(contest2.pk)) + self.assertEqual(response_data["start_promo"], "2020-06-21") + self.assertEqual(response_data["start"], "2020-07-01") + self.assertEqual(response_data["end"], "2020-07-31") + + # after last contest + with freeze_time("2020-08-14"): + response = self.client.get( + path=self.url, 
content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual(response_data["contest_id"], str(contest2.pk)) + self.assertEqual(response_data["start_promo"], "2020-06-21") + self.assertEqual(response_data["start"], "2020-07-01") + self.assertEqual(response_data["end"], "2020-07-31") diff --git a/home/tests/integration/apiv2/dailywalk/__init__.py b/home/tests/integration/apiv2/dailywalk/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/home/tests/integration/apiv2/dailywalk/test_create.py b/home/tests/integration/apiv2/dailywalk/test_create.py new file mode 100644 index 00000000..2d17b7ba --- /dev/null +++ b/home/tests/integration/apiv2/dailywalk/test_create.py @@ -0,0 +1,338 @@ +from django.test import Client, TestCase +from freezegun import freeze_time + +from home.models import Account, Contest, Device + + +class ApiTestCase(TestCase): + def setUp(self): + # Test client + self.client = Client() + + # Device ID is passed to the API as "account_id", + self.device_id = "12345" + self.email = "abhay@blah.com" + self.request_params = { + "name": "Abhay Kashyap", + "email": self.email, + "zip": "94132", + "age": 99, + } + # Create a user + response = self.client.post( + path="/api/v2/appuser", + data={**self.request_params, "account_id": self.device_id}, + content_type="application/json", + ) + + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + # Check object was created and matches expected values + user_obj = Account.objects.get(email=self.email) + + for field, expected_value in self.request_params.items(): + self.assertEqual( + getattr(user_obj, field), expected_value, msg=f"{field}" + ) + + # Details for Daily walk even creation + self.url = "/api/v2/dailywalk" + # Request parameters + self.request_params = { + "account_id": "12345", + "daily_walks": [ + {"date": "3000-02-22", "steps": 500, "distance": 1.3} + ], + } + self.bulk_request_params = { + "account_id": "12345", + "daily_walks": [ + {"date": "3000-02-21", "steps": 1500, "distance": 2.1}, + {"date": "3000-02-22", "steps": 500, "distance": 0.8}, + {"date": "3000-02-23", "steps": 1000, "distance": 1.4}, + ], + } + # Content type + self.content_type = "application/json" + + # Test a successful creation of a daily walk (within a contest) + def test_create_dailywalk(self): + # Create a contest + contest = Contest() + contest.start_baseline = "3000-01-01" + contest.start_promo = "3000-02-01" + contest.start = "3000-02-01" + contest.end = "3000-02-28" + contest.save() + + # Verify that the user has no contests + acct = Device.objects.get(device_id=self.device_id).account + self.assertFalse(acct.contests.exists()) + + with freeze_time("3000-02-15"): + # Send the request + response = self.client.post( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["daily_walks"][0]["date"], + self.request_params["daily_walks"][0]["date"], + msg=fail_message, + ) + self.assertEqual( + response_data["daily_walks"][0]["steps"], + self.request_params["daily_walks"][0]["steps"], + msg=fail_message, + ) + self.assertEqual( + 
response_data["daily_walks"][0]["distance"], + self.request_params["daily_walks"][0]["distance"], + msg=fail_message, + ) + + # Verify that the user now has a contest + self.assertEqual(1, len(acct.contests.all())) + + # Test that a daily walk outside of a contest does not show up as + # contest participation for that user + def test_create_dailywalk_outside_contests(self): + # Create a contest + contest = Contest() + contest.start_baseline = None + contest.start_promo = "3000-03-01" + contest.start = "3000-03-01" + contest.end = "3000-03-31" + contest.save() + + # Verify that the user has no contests + acct = Device.objects.get(device_id=self.device_id).account + self.assertFalse(acct.contests.exists()) + + # Send the request + response = self.client.post( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # Verify that the user still has no contests + self.assertFalse(acct.contests.exists()) + + # Test that daily walk data sent as baseline data shows up as baseline data + def test_create_baseline_dailywalk(self): + # Create a contest + contest = Contest() + contest.start_baseline = "3000-02-01" + contest.start_promo = "3000-03-01" + contest.start = "3000-03-01" + contest.end = "3000-03-31" + contest.save() + + # Verify that the user has no contests + acct = Device.objects.get(device_id=self.device_id).account + self.assertFalse(acct.contests.exists()) + + # Verify that the user has no contests + self.assertFalse(acct.contests.exists()) + + # Send baseline data + baseline_data = { + "account_id": "12345", + "daily_walks": [ + {"date": "3000-02-22", "steps": 500, "distance": 1.3} + ], + } + response = self.client.post( + path=self.url, data=baseline_data, content_type=self.content_type + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # Verify that the user still has no contests + self.assertFalse(acct.contests.exists()) + + # Test creation of a daily walk with an invalid user account + def test_create_dailywalk_invalidaccount(self): + + self.request_params["account_id"] = "0000000" + + # Send the request + response = self.client.post( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"], + "Unregistered device - " + f'{self.request_params["account_id"]}.' 
+ " Please register first!", + msg=fail_message, + ) + + # Test creation of a daily walk with a missing daily_walks field + def test_create_dailywalk_missing_daily_walks(self): + + del self.request_params["daily_walks"] + + # Send the request + response = self.client.post( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 422) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"][0]["type"], "missing", msg=fail_message + ) + self.assertIn( + "daily_walks", response_data["detail"][0]["loc"], msg=fail_message + ) + self.assertEqual( + response_data["detail"][0]["msg"], + "Field required", + msg=fail_message, + ) + + # Test creation of a daily walk with a missing field + def test_create_dailywalk_missing_steps(self): + request_params = self.request_params.copy() + del request_params["daily_walks"][0]["steps"] + + # Send the request + response = self.client.post( + path=self.url, + data=request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 422) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"][0]["type"], "missing", msg=fail_message + ) + self.assertIn( + "daily_walks", response_data["detail"][0]["loc"], msg=fail_message + ) + self.assertEqual( + response_data["detail"][0]["msg"], + "Field required", + msg=fail_message, + ) + + # Test a successful creation of a daily walk + def test_bulk_create_dailywalk(self): + + # Send the request + response = self.client.post( + path=self.url, + data=self.bulk_request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + len(response_data["daily_walks"]), + len(self.bulk_request_params["daily_walks"]), + ) + for i, daily_walk_data in enumerate(response_data["daily_walks"]): + self.assertEqual( + daily_walk_data["date"], + self.bulk_request_params["daily_walks"][i]["date"], + msg=fail_message, + ) + self.assertEqual( + daily_walk_data["steps"], + self.bulk_request_params["daily_walks"][i]["steps"], + msg=fail_message, + ) + self.assertEqual( + daily_walk_data["distance"], + self.bulk_request_params["daily_walks"][i]["distance"], + msg=fail_message, + ) + + # # Test invalid method + # def test_create_dailywalk_invalid_methods(self): + # # Test not allowed get method + # response = self.client.get( + # path=self.url, + # data=self.request_params, + # content_type=self.content_type, + # ) + # # Check for a successful response by the server + # self.assertEqual(response.status_code, 405) + # # Parse the response + # response_data = response.content + # fail_message = f"Server response - {response_data}" + # self.assertEqual( + # response_data, b"Method not allowed", msg=fail_message + # ) + + # # Test not allowed patch method + # response = self.client.patch( + # path=self.url, + # data=self.request_params, + # content_type=self.content_type, + # ) + # # Check for a successful response by the server + # self.assertEqual(response.status_code, 405) + # # Parse the response + # response_data = response.content + # fail_message = 
f"Server response - {response_data}" + # self.assertEqual( + # response_data, b"Method not allowed", msg=fail_message + # ) + + # # Test not allowed put method + # response = self.client.put( + # path=self.url, + # data=self.request_params, + # content_type=self.content_type, + # ) + # # Check for a successful response by the server + # self.assertEqual(response.status_code, 405) + # # Parse the response + # response_data = response.content + # fail_message = f"Server response - {response_data}" + # self.assertEqual( + # response_data, b"Method not allowed", msg=fail_message + # ) + + # # Test not allowed delete method + # response = self.client.delete( + # path=self.url, + # data=self.request_params, + # content_type=self.content_type, + # ) + # # Check for a successful response by the server + # self.assertEqual(response.status_code, 405) + # # Parse the response + # response_data = response.content + # fail_message = f"Server response - {response_data}" + # self.assertEqual( + # response_data, b"Method not allowed", msg=fail_message + # ) diff --git a/home/tests/integration/apiv2/dailywalk/test_get.py b/home/tests/integration/apiv2/dailywalk/test_get.py new file mode 100644 index 00000000..c26d15d3 --- /dev/null +++ b/home/tests/integration/apiv2/dailywalk/test_get.py @@ -0,0 +1,336 @@ +from django.test import Client, TestCase + +from home.models import Account + + +class ApiTestCase(TestCase): + def setUp(self): + # Test client + self.client = Client() + + # Device ID is passed to the API as "account_id", + self.device_id = "12345" + self.email = "abhay@blah.com" + self.request_params = { + "name": "Abhay Kashyap", + "email": self.email, + "zip": "94132", + "age": 99, + "account_id": self.device_id, + } + # Create a user + response = self.client.post( + path="/api/v2/appuser", + data=self.request_params, + content_type="application/json", + ) + + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # Create three daily walks + self.daily_walks = { + "account_id": "12345", + "daily_walks": [ + {"date": "2020-02-21", "steps": 1500, "distance": 2.1}, + {"date": "2020-02-22", "steps": 500, "distance": 0.8}, + {"date": "2020-02-23", "steps": 1000, "distance": 1.4}, + ], + } + # Create daily walks + response = self.client.post( + path="/api/v2/dailywalk", + data=self.daily_walks, + content_type="application/json", + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # Details for Daily walk list view + self.url = "/api/v2/dailywalk" + # Request parameters + self.request_params = {"account_id": "12345"} + # Content type + self.content_type = "application/json" + + def test_dailywalk_get_failure(self): + self.request_params["account_id"] = "0000000" + + # Send the request + response = self.client.get( + path=f"{self.url}/{self.request_params['account_id']}" + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"], + "Unregistered device - " + f'{self.request_params["account_id"]}.' 
+ " Please register first!", + msg=fail_message, + ) + + def test_dailywalk_get_missing_account_id(self): + request_params = self.request_params.copy() + del request_params["account_id"] + + # Send the request + response = self.client.get(path=f"{self.url}/") + # Check for a successful response by the server + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.content + fail_message = f"Server response - {response_data}" + self.assertIn(b"Not Found", response_data, msg=fail_message) + + def test_dailywalk_get(self): + response = self.client.get( + path=f"{self.url}/{self.request_params['account_id']}", + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertIn("daily_walks", response_data, msg=fail_message) + self.assertIn("total_steps", response_data, msg=fail_message) + self.assertIn("total_distance", response_data, msg=fail_message) + for walk in response_data["daily_walks"]: + self.assertIn("date", walk, msg=fail_message) + self.assertIn("steps", walk, msg=fail_message) + self.assertIn("distance", walk, msg=fail_message) + # Check if total steps is correct + self.assertEqual( + response_data["total_steps"], + sum([dw["steps"] for dw in self.daily_walks["daily_walks"]]), + msg=fail_message, + ) + # Check if total distance is correct + self.assertEqual( + response_data["total_distance"], + sum([dw["distance"] for dw in self.daily_walks["daily_walks"]]), + msg=fail_message, + ) + # Check if the number of events match + self.assertEqual( + len(response_data["daily_walks"]), + len(self.daily_walks["daily_walks"]), + msg=fail_message, + ) + # Check if they have the same exact data + # Note that the response will always be ordered by the latest date + self.assertEqual( + response_data["daily_walks"], + sorted( + [ + { + "date": dw["date"], + "steps": dw["steps"], + "distance": dw["distance"], + } + for dw in self.daily_walks["daily_walks"] + ], + key=lambda x: x["date"], + reverse=True, + ), + msg=fail_message, + ) + + # Check if walks across multiple accounts tied to the same + # email are aggregated + def test_dailywalk_get_aggregated(self): + + # Create a second account from a different device but + # with the same email + response = self.client.post( + path="/api/v2/appuser", + data={ + "name": "Abhay Kashyap", + "email": "abhay@blah.com", + "zip": "94102", + "age": 99, + "account_id": "54321", + }, + content_type="application/json", + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # Create a third account from a different device but with the same email + response = self.client.post( + path="/api/v2/appuser", + data={ + "name": "Abhay Kashyap", + "email": "abhay@blah.com", + "zip": "94102", + "age": 99, + "account_id": "99999", + }, + content_type="application/json", + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # Create additional walks + new_daily_walks = { + "account_id": "54321", + "daily_walks": [ + {"date": "2020-02-12", "steps": 444, "distance": 0.4}, + {"date": "2020-02-13", "steps": 666, "distance": 0.6}, + ], + } + response = self.client.post( + path="/api/v2/dailywalk", + data=new_daily_walks, + content_type="application/json", + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # Update the daily walks for 
the tests to run + # Note that to aggregate data, the updated event for "2020-02-12" + # from account "99999" must be used instead of the update from "54321" + # So pop that entry from new_daily_walks & update daily walks + new_daily_walks["daily_walks"].pop(0) + self.daily_walks["daily_walks"] += new_daily_walks["daily_walks"] + + # Create another entry for the same first date but from a different + # account. This should create a new row but when aggregating data, + # this entry should supercede the older entry for the same day from + # the different account + new_daily_walks = { + "account_id": "99999", + "daily_walks": [ + {"date": "2020-02-14", "steps": 999, "distance": 0.9}, + {"date": "2020-02-12", "steps": 555, "distance": 0.5}, + ], + } + response = self.client.post( + path="/api/v2/dailywalk", + data=new_daily_walks, + content_type="application/json", + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # Update the daily walks for the tests to run + self.daily_walks["daily_walks"] += new_daily_walks["daily_walks"] + + response = self.client.get( + path=f"{self.url}/{self.request_params['account_id']}", + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertIn("daily_walks", response_data, msg=fail_message) + self.assertIn("total_steps", response_data, msg=fail_message) + self.assertIn("total_distance", response_data, msg=fail_message) + for walk in response_data["daily_walks"]: + self.assertIn("date", walk, msg=fail_message) + self.assertIn("steps", walk, msg=fail_message) + self.assertIn("distance", walk, msg=fail_message) + # Check if total steps is correct + self.assertEqual( + response_data["total_steps"], + sum([dw["steps"] for dw in self.daily_walks["daily_walks"]]), + msg=fail_message, + ) + # Check if total steps is correct + self.assertEqual( + response_data["total_distance"], + sum([dw["distance"] for dw in self.daily_walks["daily_walks"]]), + msg=fail_message, + ) + # Check if the number of events match + self.assertEqual( + len(response_data["daily_walks"]), + len(self.daily_walks["daily_walks"]), + msg=fail_message, + ) + # Check if they have the same exact data + # Note that the response will always be ordered by the latest date + self.assertEqual( + response_data["daily_walks"], + sorted( + [ + { + "date": dw["date"], + "steps": dw["steps"], + "distance": dw["distance"], + } + for dw in self.daily_walks["daily_walks"] + ], + key=lambda x: x["date"], + reverse=True, + ), + msg=fail_message, + ) + + # Test invalid method + def test_dailywalk_get_invalid_methods(self): + # Test not allowed get method + response = self.client.get( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 405) + # Parse the response + response_data = response.content + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data, b"Method not allowed", msg=fail_message + ) + + # Test not allowed patch method + response = self.client.patch( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 405) + # Parse the response + response_data = 
response.content
+        fail_message = f"Server response - {response_data}"
+        self.assertEqual(
+            response_data, b"Method not allowed", msg=fail_message
+        )
+
+        # Test not allowed put method
+        response = self.client.put(
+            path=self.url,
+            data=self.request_params,
+            content_type=self.content_type,
+        )
+        # Check for a 405 response by the server
+        self.assertEqual(response.status_code, 405)
+        # Parse the response
+        response_data = response.content
+        fail_message = f"Server response - {response_data}"
+        self.assertEqual(
+            response_data, b"Method not allowed", msg=fail_message
+        )
+
+        # Test not allowed delete method
+        response = self.client.delete(
+            path=self.url,
+            data=self.request_params,
+            content_type=self.content_type,
+        )
+        # Check for a 405 response by the server
+        self.assertEqual(response.status_code, 405)
+        # Parse the response
+        response_data = response.content
+        fail_message = f"Server response - {response_data}"
+        self.assertEqual(
+            response_data, b"Method not allowed", msg=fail_message
+        )
diff --git a/home/tests/integration/apiv2/dailywalk/test_update.py b/home/tests/integration/apiv2/dailywalk/test_update.py
new file mode 100644
index 00000000..db6cea63
--- /dev/null
+++ b/home/tests/integration/apiv2/dailywalk/test_update.py
@@ -0,0 +1,77 @@
+from django.test import Client, TestCase
+
+
+class ApiTestCase(TestCase):
+    def setUp(self):
+        # Test client
+        self.client = Client()
+        # Create a user
+        response = self.client.post(
+            path="/api/v2/appuser",
+            data={
+                "name": "Abhay Kashyap",
+                "email": "abhay@blah.com",
+                "zip": "94102",
+                "age": 99,
+                "account_id": "12345",
+            },
+            content_type="application/json",
+        )
+
+        # Check for a successful response by the server
+        self.assertEqual(response.status_code, 201)
+
+        # Details for Daily walk event creation
+        self.url = "/api/v2/dailywalk"
+        # Request parameters
+        self.request_params = {
+            "account_id": "12345",
+            "daily_walks": [
+                {"date": "2020-02-22", "steps": 500, "distance": 1.3}
+            ],
+        }
+        # Content type
+        self.content_type = "application/json"
+
+        # Create a daily walk
+        # Send the request
+        response = self.client.post(
+            path=self.url,
+            data=self.request_params,
+            content_type=self.content_type,
+        )
+        # Check for a successful response by the server
+        self.assertEqual(response.status_code, 201)
+
+    # Test creation of a daily walk for the same date twice
+    def test_update_steps_dailywalk_success(self):
+
+        # Send the second request but ensure it's an update
+        self.request_params["daily_walks"][0]["steps"] = 1000
+        self.request_params["daily_walks"][0]["distance"] = 2.1
+
+        response = self.client.post(
+            path=self.url,
+            data=self.request_params,
+            content_type=self.content_type,
+        )
+        # Check for a successful response by the server
+        self.assertEqual(response.status_code, 201)
+        # Parse the response
+        response_data = response.json()
+        fail_message = f"Server response - {response_data}"
+        self.assertEqual(
+            response_data["daily_walks"][0]["date"],
+            self.request_params["daily_walks"][0]["date"],
+            msg=fail_message,
+        )
+        self.assertEqual(
+            response_data["daily_walks"][0]["steps"],
+            self.request_params["daily_walks"][0]["steps"],
+            msg=fail_message,
+        )
+        self.assertEqual(
+            response_data["daily_walks"][0]["distance"],
+            self.request_params["daily_walks"][0]["distance"],
+            msg=fail_message,
+        )
diff --git a/home/tests/integration/apiv2/device/__init__.py b/home/tests/integration/apiv2/device/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/home/tests/integration/apiv2/device/test_delete.py
b/home/tests/integration/apiv2/device/test_delete.py new file mode 100644 index 00000000..ad29cb9a --- /dev/null +++ b/home/tests/integration/apiv2/device/test_delete.py @@ -0,0 +1,186 @@ +from django.test import Client, TestCase + +from home.models import Account, Device + + +class ApiTestCase(TestCase): + def setUp(self): + # Test client + self.client = Client() + + # Urls for creation and deletion + self.appuser_url = "/api/v2/appuser" + self.device_url = "/api/v2/device" + self.content_type = "application/json" + + # Constants + self.account_id1 = "12345" + self.account_id2 = "23456" + self.email1 = "john@blah.com" + self.email2 = "joe@blah.com" + + # Request parameters + base_params = { + "name": "John Doe", + "zip": "94132", + "age": 99, + } + self.request_params_user1 = base_params.copy() + self.request_params_user2 = base_params.copy() + self.request_params_user1.update( + { + "name": "John Doe", + "email": self.email1, + "account_id": self.account_id1, + } + ) + self.request_params_user2.update( + { + "name": "Joe Doe", + "email": self.email2, + "account_id": self.account_id2, + } + ) + + # Register the users + self.create_user_and_device_confirm_response(self.request_params_user1) + self.create_user_and_device_confirm_response(self.request_params_user2) + + self.existing_user_accts = [self.account_id1, self.account_id2] + + def tearDown(self) -> None: + for acct_id in self.existing_user_accts: + response = self.client.delete( + path=f"{self.appuser_url}/{acct_id}", + # data={"account_id": acct_id}, + content_type=self.content_type, + ) + + self.check_delete_success(response, acct_id) + + def create_user_and_device_confirm_response(self, request_params): + # Create the user + request_params = request_params.copy() + device_id = request_params["account_id"] + response = self.client.post( + path=self.appuser_url, + data=request_params, + content_type=self.content_type, + ) + + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # Check object was created and matches expected values + user_obj = Account.objects.get(email=request_params["email"]) + del request_params["account_id"] + for field, expected_value in request_params.items(): + self.assertEqual( + getattr(user_obj, field), expected_value, msg=f"{field}" + ) + + # Required request params for updating device info + request_params = { + "device_model": "iPhone16,1", + "manufacturer": "Apple", + "os_name": "iOS", + "os_version": "17.5", + "device_id": device_id, + } + # Update the user's device for the first time + response = self.client.put( + path=f"{self.device_url}/{device_id}", + data=request_params, + content_type=self.content_type, + ) + + # Check for a successful response by the server + self.assertEqual(response.status_code, 204) + + # Check for a successful update in the database + device_obj = Device.objects.get(device_id=device_id) + for field, value in request_params.items(): + self.assertEqual( + getattr(device_obj, field), + value, + msg=f"field: {field} - Expected: {value} \ + Got: {getattr(device_obj, field)}", + ) + + def check_delete_success(self, response, deleted_account_id): + # Check for a successful delete response by the server + self.assertEqual(response.status_code, 204, msg=response) + + # Check user/device no longer exists by trying & failing to + # update the nonexistent user + patch_response = self.client.patch( + path=f"{self.appuser_url}/{deleted_account_id}", + data={"account_id": deleted_account_id}, + content_type=self.content_type, + ) + 
self.assertEqual(patch_response.status_code, 404, msg=patch_response) + + patch_response = self.client.patch( + path=f"{self.device_url}/{deleted_account_id}", + data={"device_id": deleted_account_id}, + content_type=self.content_type, + ) + self.assertEqual(patch_response.status_code, 404, msg=patch_response) + + def check_users_and_device_still_exist(self, valid_account_ids=[]): + # Check other users still exist + for acct_id in valid_account_ids: + # Update user + expected_success_update_response = self.client.patch( + path=f"{self.appuser_url}/{acct_id}", + data={"account_id": acct_id}, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(expected_success_update_response.status_code, 204) + + # Update device + expected_success_update_response = self.client.patch( + path=f"{self.device_url}/{acct_id}", + data={"device_id": acct_id}, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(expected_success_update_response.status_code, 204) + + def test_delete_device_success(self): + # Delete the first user + response = self.client.delete( + path=f"{self.device_url}/{self.account_id1}", + content_type=self.content_type, + ) + + self.check_delete_success(response, self.account_id1) + self.check_users_and_device_still_exist([self.account_id2]) + + self.existing_user_accts = [self.account_id2] + + def check_delete_failure(self, response, expected_msg): + # Check for a failed delete response by the server + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"], expected_msg, msg=fail_message + ) + + def test_delete_user_failure_nonexistent(self): + # Delete nonexistent user + response = self.client.delete( + path=f"{self.device_url}/fakeID", + content_type=self.content_type, + ) + + self.check_delete_failure( + response, + expected_msg=f"Unregistered device - device_id: fakeID. 
Please register first!", + ) + self.check_users_and_device_still_exist( + [self.account_id1, self.account_id2] + ) diff --git a/home/tests/integration/apiv2/device/test_update.py b/home/tests/integration/apiv2/device/test_update.py new file mode 100644 index 00000000..42cfa45f --- /dev/null +++ b/home/tests/integration/apiv2/device/test_update.py @@ -0,0 +1,287 @@ +from django.test import Client, TestCase + +from home.models import Device + + +class ApiTestCase(TestCase): + def setUp(self): + # Test client + self.client = Client() + # Constants + self.device_id = "12345" + + # Content type + self.content_type = "application/json" + + # Register the user + response = self.client.post( + path="/api/v2/appuser", + data={ + "name": "John Doe", + "email": "john@blah.com", + "zip": "94102", + "age": 99, + "account_id": self.device_id, + }, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # Url for device update + self.url = "/api/v2/device" + + self.request_params = { + "device_model": "iPhone16,1", + "manufacturer": "Apple", + "os_name": "iOS", + "os_version": "17.5", + } + + # Test patching a User's device + def test_update_device_patch(self): + for field, value in self.request_params.items(): + # Update value + request_params = { + "device_id": self.device_id, + field: value, + } + + # Register the user + response = self.client.patch( + path=f"{self.url}/{self.device_id}", + data=request_params, + content_type=self.content_type, + ) + + # Check for a successful response by the server + self.assertEqual(response.status_code, 204) + + # Check for a successful update in the database + device_obj = Device.objects.get(device_id=self.device_id) + self.assertEqual( + getattr(device_obj, field), + value, + msg=f"field: {field} - Expected: {value} \ + Got: {getattr(device_obj, field)}", + ) + + # Test updating a User's device using put method + def test_update_device_put(self): + # Update value + request_params = { + **self.request_params, + "device_id": self.device_id, + } + # Update the user's device for the first time + response = self.client.put( + path=f"{self.url}/{self.device_id}", + data=request_params, + content_type=self.content_type, + ) + + # Check for a successful response by the server + self.assertEqual(response.status_code, 204) + + # Check for a successful update in the database + device_obj = Device.objects.get(device_id=self.device_id) + for field, value in self.request_params.items(): + self.assertEqual( + getattr(device_obj, field), + value, + msg=f"field: {field} - Expected: {value} \ + Got: {getattr(device_obj, field)}", + ) + + # Change OS version + request_params["os_version"] = "17.6.1" + + # Update the user's device os_version + response = self.client.put( + path=f"{self.url}/{self.device_id}", + data=request_params, + content_type=self.content_type, + ) + + # Check for a successful response by the server + self.assertEqual(response.status_code, 204) + + # Check for a successful update in the database + device_obj = Device.objects.get(device_id=self.device_id) + self.assertEqual(getattr(device_obj, "os_version"), "17.6.1") + for field, value in request_params.items(): + self.assertEqual( + getattr(device_obj, field), + value, + msg=f"field: {field} - Expected: {value} \ + Got: {getattr(device_obj, field)}", + ) + + # Test updating User's device without providing device_id + def test_update_device_missing_account_id(self): + # Required request parameters + request_params = { + 
**self.request_params, + "device_id": self.device_id, + } + # Update the user's device for the first time + response = self.client.put( + path=f"{self.url}/{self.device_id}", + data=request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 204) + + # Test with device_id missing from path parameter + # Patch method + response = self.client.patch( + path=f"{self.url}/", + data=request_params, + content_type=self.content_type, + ) + # Check for a 404 response by the server + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.content + fail_message = f"Server response - {response_data}" + self.assertIn(b"Not Found", response_data, msg=fail_message) + + # Test with device_id missing from path parameter + # Put method + response = self.client.put( + path=f"{self.url}/", + data=request_params, + content_type=self.content_type, + ) + # Check for a 404 response by the server + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.content + fail_message = f"Server response - {response_data}" + self.assertIn(b"Not Found", response_data, msg=fail_message) + + # Remove the device_id field + del request_params["device_id"] + + # Test with device_id missing from payload + # Patch method + response = self.client.patch( + path=f"{self.url}/{self.device_id}", + data=request_params, + content_type=self.content_type, + ) + # Check for a 422 response by the server + self.assertEqual(response.status_code, 422) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"][0]["type"], "missing", msg=fail_message + ) + self.assertIn( + "device_id", response_data["detail"][0]["loc"], msg=fail_message + ) + self.assertEqual( + response_data["detail"][0]["msg"], + "Field required", + msg=fail_message, + ) + + # Test with device_id missing from payload + # Put method + response = self.client.put( + path=f"{self.url}/{self.device_id}", + data=request_params, + content_type=self.content_type, + ) + # Check for a 422 response by the server + self.assertEqual(response.status_code, 422) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"][0]["type"], "missing", msg=fail_message + ) + self.assertIn( + "device_id", response_data["detail"][0]["loc"], msg=fail_message + ) + self.assertEqual( + response_data["detail"][0]["msg"], + "Field required", + msg=fail_message, + ) + + def test_update_device_invalid_device_id(self): + # Required request parameters + request_params = { + **self.request_params, + "device_id": "fakeID", + } + + # Update nonexistent user using patch method + response = self.client.patch( + path=f"{self.url}/fakeID", + data=request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + + self.assertEqual( + response_data["detail"], + "Unregistered device - device_id: fakeID. 
Please register first!", + msg=fail_message, + ) + + # Update nonexistent user using put method + response = self.client.put( + path=f"{self.url}/fakeID", + data=request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + + self.assertEqual( + response_data["detail"], + "Unregistered device - device_id: fakeID. Please register first!", + msg=fail_message, + ) + + # Test invalid methods + def test_device_invalid_methods(self): + # Required fields for device + request_params = {**self.request_params, "device_id": self.device_id} + + # Test get method + response = self.client.get( + path=f"{self.url}/{self.device_id}", + data=request_params, + content_type=self.content_type, + ) + # Check for a 405 response by the server + self.assertEqual(response.status_code, 405) + # Parse the response + response_data = response.content + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data, b"Method not allowed", msg=fail_message + ) + + # Test get method + response = self.client.get( + path=self.url, data=request_params, content_type=self.content_type + ) + # Check for a 404 response by the server + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.content + fail_message = f"Server response - {response_data}" + self.assertIn(b"Not Found", response_data, msg=fail_message) diff --git a/home/tests/integration/apiv2/histogram/__init__.py b/home/tests/integration/apiv2/histogram/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/home/tests/integration/apiv2/histogram/test_histogram.py b/home/tests/integration/apiv2/histogram/test_histogram.py new file mode 100644 index 00000000..012bb3c6 --- /dev/null +++ b/home/tests/integration/apiv2/histogram/test_histogram.py @@ -0,0 +1,217 @@ +from random import seed + +from django.test import Client, TestCase + +from home.tests.integration.apiv2.views.api.utils import ( + Login, + generate_test_data, +) + + +class ApiTestCase(TestCase): + def setUp(self): + seed(123) + + contest_id = generate_test_data() + self.contest_id = contest_id + self.client = Client() + self.client.login( + username=Login.username, + password=Login.password, + ) + # self.client = Client() + # self.user = User.objects.create_superuser( + # username="testUser", + # password="testpass", + # ) + # self.client.force_authenticate(user=self.user) + self.content_type = "application/json" + + def tearDown(self) -> None: + seed() + return super().tearDown() + + def test_happy_paths(self): + test_cases = [ + { + "path": "/api/v2/admin/users/histogram", + "data": { + "field": "age", + "bin_size": 10, + "contest_id": self.contest_id, + }, + "expect": { + "data": [ + { + "bin_idx": 0, + "bin_start": 0, + "bin_end": 10, + "count": 0, + }, + { + "bin_idx": 1, + "bin_start": 10, + "bin_end": 20, + "count": 0, + }, + { + "bin_idx": 2, + "bin_start": 20, + "bin_end": 30, + "count": 1, + }, + { + "bin_idx": 3, + "bin_start": 30, + "bin_end": 40, + "count": 0, + }, + { + "bin_idx": 4, + "bin_start": 40, + "bin_end": 50, + "count": 0, + }, + { + "bin_idx": 5, + "bin_start": 50, + "bin_end": 60, + "count": 1, + }, + { + "bin_idx": 6, + "bin_start": 60, + "bin_end": 70, + "count": 1, + }, + ], + "unit": "years", + "bin_size": 10, + }, + }, + { + "path": "/api/v2/admin/users/histogram", + "data": { + "field": "age", + 
"bin_custom": "0,10,20,30,40,50,60,70,80", + "contest_id": self.contest_id, + }, + "expect": { + "data": [ + { + "bin_start": 0, + "bin_end": 10, + "count": 0, + "bin_idx": 0, + }, + { + "bin_start": 10, + "bin_end": 20, + "count": 0, + "bin_idx": 1, + }, + { + "bin_start": 20, + "bin_end": 30, + "bin_idx": 2, + "count": 1, + }, + { + "bin_start": 30, + "bin_end": 40, + "count": 0, + "bin_idx": 3, + }, + { + "bin_start": 40, + "bin_end": 50, + "count": 0, + "bin_idx": 4, + }, + { + "bin_start": 50, + "bin_end": 60, + "bin_idx": 5, + "count": 1, + }, + { + "bin_start": 60, + "bin_end": 70, + "bin_idx": 6, + "count": 1, + }, + { + "bin_start": 70, + "bin_end": 80, + "count": 0, + "bin_idx": 7, + }, + {"bin_start": 80, "count": 0, "bin_idx": 8}, + ], + "unit": "years", + "bin_custom": [0, 10, 20, 30, 40, 50, 60, 70, 80], + }, + }, + ] + for test_case in test_cases: + with self.subTest(msg=test_case["path"]): + response = self.client.get( + path=test_case["path"], + data=test_case["data"], + content_type=self.content_type, + ) + self.assertEqual( + response.status_code, + 200, + f"Received non-200 response: {response.json()}", + ) + print(response.json()) + self.assertEqual(response.json(), test_case["expect"]) + + def test_unsupported_paths(self): + response = self.client.get( + path="/api/v2/admin/NOTSUPPORTEDMODELXD/histogram", + data={ + "field": "age", + "bin_size": 10, + "contest_id": self.contest_id, + }, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"], + "Invalid model and/or NOTSUPPORTEDMODELXD does not support histograms", + msg=fail_message, + ) + + response = self.client.get( + path="/api/v2/admin/users/histogram", + data={ + "AIN'T GOT NO FIELDS": "NOPE", + }, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 422) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"][0]["type"], "missing", msg=fail_message + ) + self.assertIn( + "query", response_data["detail"][0]["loc"], msg=fail_message + ) + self.assertIn( + "field", response_data["detail"][0]["loc"], msg=fail_message + ) + self.assertEqual( + response_data["detail"][0]["msg"], + "Field required", + msg=fail_message, + ) diff --git a/home/tests/integration/apiv2/intentionalwalk/__init__.py b/home/tests/integration/apiv2/intentionalwalk/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/home/tests/integration/apiv2/intentionalwalk/test_create.py b/home/tests/integration/apiv2/intentionalwalk/test_create.py new file mode 100644 index 00000000..9dcc6987 --- /dev/null +++ b/home/tests/integration/apiv2/intentionalwalk/test_create.py @@ -0,0 +1,263 @@ +from django.test import Client, TestCase + + +class ApiTestCase(TestCase): + def setUp(self): + # Test client + self.client = Client() + # Create a user + response = self.client.post( + path="/api/v2/appuser", + data={ + "name": "Abhay Kashyap", + "email": "abhay@blah.com", + "zip": "94102", + "age": 99, + "account_id": "12345", + }, + content_type="application/json", + ) + + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # Details for Daily walk even creation + self.url = "/api/v2/intentionalwalk" 
+ # Request parameters + self.request_params = { + "account_id": "12345", + "intentional_walks": [ + { + "event_id": "8888", + "start": "2020-02-21T12:15:00-05:00", + "end": "2020-02-21T12:45:00-05:00", + "steps": 500, + "distance": 1.3, + "pause_time": 456, + } + ], + } + # Content type + self.content_type = "application/json" + + # Test a successful creation of a intentional walk + def test_create_intentionalwalk(self): + + # Send the request + response = self.client.post( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["account_id"], + self.request_params["account_id"], + msg=fail_message, + ) + self.assertEqual( + response_data["intentional_walks"][0]["event_id"], + self.request_params["intentional_walks"][0]["event_id"], + msg=fail_message, + ) + self.assertEqual( + response_data["intentional_walks"][0]["start"], + self.request_params["intentional_walks"][0]["start"], + msg=fail_message, + ) + self.assertEqual( + response_data["intentional_walks"][0]["end"], + self.request_params["intentional_walks"][0]["end"], + msg=fail_message, + ) + self.assertEqual( + response_data["intentional_walks"][0]["steps"], + self.request_params["intentional_walks"][0]["steps"], + msg=fail_message, + ) + self.assertEqual( + response_data["intentional_walks"][0]["pause_time"], + self.request_params["intentional_walks"][0]["pause_time"], + msg=fail_message, + ) + self.assertEqual( + response_data["intentional_walks"][0]["distance"], + self.request_params["intentional_walks"][0]["distance"], + msg=fail_message, + ) + + # Test creation of a intentional walk with an invalid user account + def test_create_intentionalwalk_invalidaccount(self): + + self.request_params["account_id"] = "0000000" + + # Send the request + response = self.client.post( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"], + "Unregistered device - " + f'{self.request_params["account_id"]}. 
' + "Please register first!", + msg=fail_message, + ) + + # Test creation of a intentional walk with a missing steps field + def test_create_intentionalwalk_missing_steps(self): + + del self.request_params["intentional_walks"][0]["steps"] + + # Send the request + response = self.client.post( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 422) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"][0]["type"], "missing", msg=fail_message + ) + self.assertIn( + "steps", response_data["detail"][0]["loc"], msg=fail_message + ) + self.assertEqual( + response_data["detail"][0]["msg"], + "Field required", + msg=fail_message, + ) + + # Test creation of a intentional walk with a missing account_id field + def test_create_intentionalwalk_missing_account_id(self): + + del self.request_params["account_id"] + + # Send the request + response = self.client.post( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 422) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"][0]["type"], "missing", msg=fail_message + ) + self.assertIn( + "account_id", response_data["detail"][0]["loc"], msg=fail_message + ) + self.assertEqual( + response_data["detail"][0]["msg"], + "Field required", + msg=fail_message, + ) + + # Test creation of a intentional walk with a missing intentional_walks field + def test_create_intentionalwalk_missing_intentional_walks(self): + + del self.request_params["intentional_walks"] + + # Send the request + response = self.client.post( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 422) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"][0]["type"], "missing", msg=fail_message + ) + self.assertIn( + "intentional_walks", + response_data["detail"][0]["loc"], + msg=fail_message, + ) + self.assertEqual( + response_data["detail"][0]["msg"], + "Field required", + msg=fail_message, + ) + + # Test invalid method + def test_create_intentionalwalk_invalid_methods(self): + # Test not allowed get method + response = self.client.get( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 405) + # Parse the response + response_data = response.content + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data, b"Method not allowed", msg=fail_message + ) + + # Test not allowed patch method + response = self.client.patch( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 405) + # Parse the response + response_data = response.content + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data, b"Method not allowed", msg=fail_message + ) + + # Test not allowed put method + response = self.client.put( + path=self.url, + 
data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 405) + # Parse the response + response_data = response.content + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data, b"Method not allowed", msg=fail_message + ) + + # Test not allowed delete method + response = self.client.delete( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 405) + # Parse the response + response_data = response.content + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data, b"Method not allowed", msg=fail_message + ) diff --git a/home/tests/integration/apiv2/intentionalwalk/test_get.py b/home/tests/integration/apiv2/intentionalwalk/test_get.py new file mode 100644 index 00000000..29ed6b2e --- /dev/null +++ b/home/tests/integration/apiv2/intentionalwalk/test_get.py @@ -0,0 +1,406 @@ +from dateutil import parser +from django.test import Client, TestCase + + +class ApiTestCase(TestCase): + def setUp(self): + # Test client + self.client = Client() + # Create a user + response = self.client.post( + path="/api/v2/appuser", + data={ + "name": "Abhay Kashyap", + "email": "abhay@blah.com", + "zip": "94102", + "age": 99, + "account_id": "12345", + }, + content_type="application/json", + ) + + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # Create three intentional walks + self.intentional_walks = { + "account_id": "12345", + "intentional_walks": [ + { + "event_id": "1111", + "start": "2020-02-21T12:15:00-05:00", + "end": "2020-02-21T12:45:00-05:00", + "steps": 1500, + "distance": 1.3, + "pause_time": 23.5, + }, + { + "event_id": "2222", + "start": "2020-02-21T15:20:00-05:00", + "end": "2020-02-21T15:50:00-05:00", + "steps": 500, + "distance": 0.4, + "pause_time": 53.5, + }, + { + "event_id": "3333", + "start": "2020-02-22T13:15:00-05:00", + "end": "2020-02-22T13:45:00-05:00", + "steps": 1000, + "distance": 0.7, + "pause_time": 100.1, + }, + ], + } + # Create intentional walks + response = self.client.post( + path="/api/v2/intentionalwalk", + data=self.intentional_walks, + content_type="application/json", + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # Details for intentional walk list view + self.url = "/api/v2/intentionalwalk" + # Request parameters + self.request_params = {"account_id": "12345"} + # Content type + self.content_type = "application/json" + + def test_intentionalwalk_get_failure(self): + self.request_params["account_id"] = "0000000" + + # Send the request + response = self.client.get( + path=f"{self.url}/{self.request_params['account_id']}", + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"], + "Unregistered device - " + f"{self.request_params['account_id']}." 
+ " Please register first!", + msg=fail_message, + ) + + def test_intentionalwalk_get(self): + response = self.client.get( + path=f"{self.url}/{self.request_params['account_id']}", + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertIn("intentional_walks", response_data, msg=fail_message) + self.assertIn("total_steps", response_data, msg=fail_message) + self.assertIn("total_distance", response_data, msg=fail_message) + self.assertIn("total_walk_time", response_data, msg=fail_message) + self.assertIn("total_pause_time", response_data, msg=fail_message) + for walk in response_data["intentional_walks"]: + self.assertIn("start", walk, msg=fail_message) + self.assertIn("end", walk, msg=fail_message) + self.assertIn("steps", walk, msg=fail_message) + self.assertIn("distance", walk, msg=fail_message) + self.assertIn("walk_time", walk, msg=fail_message) + self.assertIn("pause_time", walk, msg=fail_message) + # Check if total steps is correct + self.assertEqual( + response_data["total_steps"], + sum( + [ + dw["steps"] + for dw in self.intentional_walks["intentional_walks"] + ] + ), + msg=fail_message, + ) + self.assertEqual( + response_data["total_distance"], + sum( + [ + dw["distance"] + for dw in self.intentional_walks["intentional_walks"] + ] + ), + msg=fail_message, + ) + self.assertEqual( + response_data["total_pause_time"], + sum( + [ + dw["pause_time"] + for dw in self.intentional_walks["intentional_walks"] + ] + ), + msg=fail_message, + ) + self.assertEqual( + response_data["total_walk_time"], + sum( + [ + ( + parser.parse(dw["end"]) - parser.parse(dw["start"]) + ).total_seconds() + - dw["pause_time"] + for dw in self.intentional_walks["intentional_walks"] + ] + ), + msg=fail_message, + ) + # Check if the number of events match + self.assertEqual( + len(response_data["intentional_walks"]), + len(self.intentional_walks["intentional_walks"]), + msg=fail_message, + ) + + # Check if they have the same exact data + # Remove timestamp strings since formatting is different + self.assertEqual( + [ + { + "steps": dw["steps"], + "distance": dw["distance"], + "pause_time": dw["pause_time"], + } + for dw in sorted( + response_data["intentional_walks"], + key=lambda x: x["start"], + reverse=True, + ) + ], + [ + { + "steps": dw["steps"], + "distance": dw["distance"], + "pause_time": dw["pause_time"], + } + for dw in sorted( + self.intentional_walks["intentional_walks"], + key=lambda x: x["start"], + reverse=True, + ) + ], + msg=fail_message, + ) + + # Check if walks across multiple accounts tied to the same + # email are aggregated + def test_intentionalwalk_get_aggregated(self): + + # Create a second account from a different device but with + # the same email + response = self.client.post( + path="/api/v2/appuser", + data={ + "name": "Abhay Kashyap", + "email": "abhay@blah.com", + "zip": "94103", + "age": 99, + "account_id": "54321", + }, + content_type="application/json", + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # Create a third account from a different device but with the same email + response = self.client.post( + path="/api/v2/appuser", + data={ + "name": "Abhay Kashyap", + "email": "abhay@blah.com", + "zip": "94105", + "age": 99, + "account_id": "99999", + }, + content_type="application/json", + ) + # Check 
for a successful response by the server + self.assertEqual(response.status_code, 201) + + # Create additional walks + new_intentional_walks = { + "account_id": "54321", + "intentional_walks": [ + { + "event_id": "4444", + "start": "2020-02-21T18:15:00-05:00", + "end": "2020-02-21T18:45:00-05:00", + "steps": 700, + "distance": 0.7, + "pause_time": 600, + }, + { + "event_id": "8888", + "start": "2020-02-23T15:20:00-05:00", + "end": "2020-02-23T15:50:00-05:00", + "steps": 900, + "distance": 0.8, + "pause_time": 400, + }, + ], + } + self.intentional_walks["intentional_walks"] += new_intentional_walks[ + "intentional_walks" + ] + response = self.client.post( + path="/api/v2/intentionalwalk", + data=new_intentional_walks, + content_type="application/json", + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + new_intentional_walks = { + "account_id": "99999", + "intentional_walks": [ + { + "event_id": "9999", + "start": "2020-02-24T13:15:00-05:00", + "end": "2020-02-24T13:45:00-05:00", + "steps": 1000, + "distance": 1.1, + "pause_time": 600.5, + }, + ], + } + self.intentional_walks["intentional_walks"] += new_intentional_walks[ + "intentional_walks" + ] + response = self.client.post( + path="/api/v2/intentionalwalk", + data=new_intentional_walks, + content_type="application/json", + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + response = self.client.get( + path=f"{self.url}/{self.request_params['account_id']}", + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertIn("intentional_walks", response_data, msg=fail_message) + self.assertIn("total_steps", response_data, msg=fail_message) + self.assertIn("total_distance", response_data, msg=fail_message) + self.assertIn("total_walk_time", response_data, msg=fail_message) + self.assertIn("total_pause_time", response_data, msg=fail_message) + for walk in response_data["intentional_walks"]: + self.assertIn("start", walk, msg=fail_message) + self.assertIn("end", walk, msg=fail_message) + self.assertIn("steps", walk, msg=fail_message) + self.assertIn("distance", walk, msg=fail_message) + self.assertIn("walk_time", walk, msg=fail_message) + self.assertIn("pause_time", walk, msg=fail_message) + # Check if total steps is correct + self.assertEqual( + response_data["total_steps"], + sum( + [ + dw["steps"] + for dw in self.intentional_walks["intentional_walks"] + ] + ), + msg=fail_message, + ) + self.assertEqual( + response_data["total_distance"], + sum( + [ + dw["distance"] + for dw in self.intentional_walks["intentional_walks"] + ] + ), + msg=fail_message, + ) + self.assertEqual( + response_data["total_pause_time"], + sum( + [ + dw["pause_time"] + for dw in self.intentional_walks["intentional_walks"] + ] + ), + msg=fail_message, + ) + self.assertEqual( + response_data["total_walk_time"], + sum( + [ + ( + parser.parse(dw["end"]) - parser.parse(dw["start"]) + ).total_seconds() + - dw["pause_time"] + for dw in self.intentional_walks["intentional_walks"] + ] + ), + msg=fail_message, + ) + # Check if the number of events match + self.assertEqual( + len(response_data["intentional_walks"]), + len(self.intentional_walks["intentional_walks"]), + msg=fail_message, + ) + + # Check if they have the same exact data + # Remove timestamp strings since formatting is different + self.assertEqual( + [ + { 
+ "steps": dw["steps"], + "distance": dw["distance"], + "pause_time": dw["pause_time"], + } + for dw in sorted( + response_data["intentional_walks"], + key=lambda x: x["start"], + reverse=True, + ) + ], + [ + { + "steps": dw["steps"], + "distance": dw["distance"], + "pause_time": dw["pause_time"], + } + for dw in sorted( + self.intentional_walks["intentional_walks"], + key=lambda x: x["start"], + reverse=True, + ) + ], + msg=fail_message, + ) + + # Test request with a missing account_id field + def test_intentionalwalk_get_missing_account_id(self): + + del self.request_params["account_id"] + + # Send the request + response = self.client.get( + path=f"{self.url}/", + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.content + fail_message = f"Server response - {response_data}" + self.assertIn(b"Not Found", response_data, msg=fail_message) diff --git a/home/tests/integration/apiv2/views/__init__.py b/home/tests/integration/apiv2/views/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/home/tests/integration/apiv2/views/__init__.py @@ -0,0 +1 @@ + diff --git a/home/tests/integration/apiv2/views/api/__init__.py b/home/tests/integration/apiv2/views/api/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/home/tests/integration/apiv2/views/api/test_admin.py b/home/tests/integration/apiv2/views/api/test_admin.py new file mode 100644 index 00000000..79b57d9f --- /dev/null +++ b/home/tests/integration/apiv2/views/api/test_admin.py @@ -0,0 +1,400 @@ +import logging +from random import seed + +from django.test import Client, TestCase + +from .utils import Login, generate_test_data + +logger = logging.getLogger(__name__) + + +class TestAdminViews(TestCase): + contest0_id = None + + def setUp(self): + seed(1) + + def tearDown(self) -> None: + seed() + return super().tearDown() + + @classmethod + def setUpTestData(cls): + cls.contest0_id = generate_test_data() + + def test_get_me(self): + c = Client() + # when unauthenticated, returns an empty response + response = c.get("/api/v2/admin/me") + self.assertEqual(response.status_code, 204) + + # log in + self.assertTrue(Login.login(c)) + + # when authenticated, returns the JSON representation of the user + response = c.get("/api/v2/admin/me") + data = response.json() + self.assertEqual(data["username"], "testadmin") + + def test_get_home(self): + c = Client() + # when unauthenticated, returns an empty response + response = c.get("/api/v2/admin/home") + self.assertEqual(response.status_code, 401) + + # log in + self.assertTrue(Login.login(c)) + response = c.get("/api/v2/admin/home") + data = response.json() + self.assertEqual( + data, + { + "accounts_count": 5, # 6 accounts - 1 tester + "accounts_steps": 350000, # 14 days * (10,000 + 15,000 steps/day) + "accounts_distance": 280000, # 14 days * (8,000 + 12,000 meters/day) + }, + ) + + def test_get_home_users_daily(self): + c = Client() + self.assertTrue(Login.login(c)) + response = c.get( + f"/api/v2/admin/home/users/daily?contest_id={self.contest0_id}" + ) + data = response.json() + self.assertEqual( + data, + [ + ["Date", "Count"], + ["3000-02-28T00:00:00", 0], + ["3000-03-02T00:00:00", 3], + ["3000-03-14T00:00:00", 0], + ], + ) + + def test_get_home_users_cumulative(self): + c = Client() + self.assertTrue(Login.login(c)) + response = c.get( + 
f"/api/v2/admin/home/users/cumulative?contest_id={self.contest0_id}" + ) + data = response.json() + self.assertEqual( + data, + [ + ["Date", "Count"], + ["3000-02-28T00:00:00", 0], + ["3000-03-02T00:00:00", 3], + ["3000-03-14T00:00:00", 3], + ], + ) + + def test_get_home_steps_daily(self): + c = Client() + self.assertTrue(Login.login(c)) + response = c.get( + f"/api/v2/admin/home/steps/daily?contest_id={self.contest0_id}" + ) + data = response.json() + self.assertEqual( + data, + [ + ["Date", "Count"], + ["3000-02-28T00:00:00", 25000], + ["3000-03-01T00:00:00", 25000], + ["3000-03-02T00:00:00", 25000], + ["3000-03-03T00:00:00", 25000], + ["3000-03-04T00:00:00", 25000], + ["3000-03-05T00:00:00", 25000], + ["3000-03-06T00:00:00", 25000], + ["3000-03-07T00:00:00", 25000], + ["3000-03-08T00:00:00", 25000], + ["3000-03-09T00:00:00", 25000], + ["3000-03-10T00:00:00", 25000], + ["3000-03-11T00:00:00", 25000], + ["3000-03-12T00:00:00", 25000], + ["3000-03-13T00:00:00", 25000], + ["3000-03-14T00:00:00", 0], + ], + ) + + def test_get_home_steps_daily_invalid_contest_id(self): + c = Client() + self.assertTrue(Login.login(c)) + response = c.get(f"/api/v2/admin/home/steps/daily?contest_id=invalid") + # Check for a successful response by the server + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.content + fail_message = f"Server response - {response_data}" + self.assertIn( + b"Cannot find contest with contest_id invalid", + response_data, + msg=fail_message, + ) + + def test_get_home_steps_cumulative(self): + c = Client() + self.assertTrue(Login.login(c)) + response = c.get( + f"/api/v2/admin/home/steps/cumulative?contest_id={self.contest0_id}" + ) + data = response.json() + self.assertEqual( + data, + [ + ["Date", "Count"], + ["3000-02-28T00:00:00", 25000], + ["3000-03-01T00:00:00", 50000], + ["3000-03-02T00:00:00", 75000], + ["3000-03-03T00:00:00", 100000], + ["3000-03-04T00:00:00", 125000], + ["3000-03-05T00:00:00", 150000], + ["3000-03-06T00:00:00", 175000], + ["3000-03-07T00:00:00", 200000], + ["3000-03-08T00:00:00", 225000], + ["3000-03-09T00:00:00", 250000], + ["3000-03-10T00:00:00", 275000], + ["3000-03-11T00:00:00", 300000], + ["3000-03-12T00:00:00", 325000], + ["3000-03-13T00:00:00", 350000], + ["3000-03-14T00:00:00", 350000], + ], + ) + + def test_get_home_distance_daily(self): + c = Client() + self.assertTrue(Login.login(c)) + response = c.get( + f"/api/v2/admin/home/distance/daily?contest_id={self.contest0_id}" + ) + data = response.json() + self.assertEqual( + data, + [ + ["Date", "Count"], + ["3000-02-28T00:00:00", 20000], + ["3000-03-01T00:00:00", 20000], + ["3000-03-02T00:00:00", 20000], + ["3000-03-03T00:00:00", 20000], + ["3000-03-04T00:00:00", 20000], + ["3000-03-05T00:00:00", 20000], + ["3000-03-06T00:00:00", 20000], + ["3000-03-07T00:00:00", 20000], + ["3000-03-08T00:00:00", 20000], + ["3000-03-09T00:00:00", 20000], + ["3000-03-10T00:00:00", 20000], + ["3000-03-11T00:00:00", 20000], + ["3000-03-12T00:00:00", 20000], + ["3000-03-13T00:00:00", 20000], + ["3000-03-14T00:00:00", 0], + ], + ) + + def test_get_home_distance_cumulative(self): + c = Client() + self.assertTrue(Login.login(c)) + response = c.get( + f"/api/v2/admin/home/distance/cumulative?contest_id={self.contest0_id}" + ) + data = response.json() + self.assertEqual( + data, + [ + ["Date", "Count"], + ["3000-02-28T00:00:00", 20000], + ["3000-03-01T00:00:00", 40000], + ["3000-03-02T00:00:00", 60000], + ["3000-03-03T00:00:00", 80000], + ["3000-03-04T00:00:00", 100000], + 
["3000-03-05T00:00:00", 120000], + ["3000-03-06T00:00:00", 140000], + ["3000-03-07T00:00:00", 160000], + ["3000-03-08T00:00:00", 180000], + ["3000-03-09T00:00:00", 200000], + ["3000-03-10T00:00:00", 220000], + ["3000-03-11T00:00:00", 240000], + ["3000-03-12T00:00:00", 260000], + ["3000-03-13T00:00:00", 280000], + ["3000-03-14T00:00:00", 280000], + ], + ) + + def test_get_contests(self): + c = Client() + self.assertTrue(Login.login(c)) + response = c.get("/api/v2/admin/contests") + data = response.json() + self.assertEqual(len(data), 2) + self.assertEqual(data[0]["contest_id"], self.contest0_id) + + def test_get_users(self): + c = Client() + self.assertTrue(Login.login(c)) + + response = c.get("/api/v2/admin/users") + data = (response.json())["users"] + self.assertEqual(response.status_code, 200) + self.assertEqual(len(data), 5) # 6 accounts - 1 tester + + response = c.get( + f"/api/v2/admin/users?contest_id={self.contest0_id}&is_tester=invalid" + ) + self.assertEqual(response.status_code, 422) + + response = c.get(f"/api/v2/admin/users?contest_id={self.contest0_id}") + self.assertEqual(response.status_code, 200) + data = (response.json())["users"] + self.assertEqual(len(data), 4) # 5 accounts in the contest - 1 tester + # default ordering is by name + self.assertEqual(data[0]["name"], "User 2") + self.assertEqual(data[0]["is_new"], False) + self.assertEqual(data[0]["dw_count"], 7) + self.assertEqual(data[0]["dw_steps"], 70000) + self.assertEqual(data[0]["dw_distance"], 56000) + self.assertEqual(data[1]["name"], "User 3") + self.assertEqual(data[1]["is_new"], True) + self.assertEqual(data[1]["dw_count"], 7) + self.assertEqual(data[1]["dw_steps"], 105000) + self.assertEqual(data[1]["dw_distance"], 84000) + self.assertEqual(data[2]["name"], "User 4") + self.assertEqual(data[2]["is_new"], True) + self.assertEqual(data[2]["dw_count"], 0) + self.assertEqual(data[2]["dw_steps"], 0) + self.assertEqual(data[2]["dw_distance"], 0) + self.assertEqual(data[3]["name"], "User 5") + self.assertEqual(data[3]["is_new"], True) + self.assertEqual(data[3]["dw_count"], 0) + self.assertEqual(data[3]["dw_steps"], 0) + self.assertEqual(data[3]["dw_distance"], 0) + + response = c.get( + f"/api/v2/admin/users?contest_id={self.contest0_id}&is_tester=true" + ) + self.assertEqual(response.status_code, 200) + data = (response.json())["users"] + self.assertEqual(len(data), 1) # 1 tester + # query + response = c.get("/api/v2/admin/users?query=User 2") + self.assertEqual(response.status_code, 200) + data = (response.json())["users"] + + self.assertEqual(len(data), 1) + self.assertEqual(data[0]["name"], "User 2") + + response = c.get("/api/v2/admin/users?query=aintgonfindmeatall") + self.assertEqual(response.status_code, 200) + data = (response.json())["users"] + self.assertEqual(len(data), 0) + + # sort + response = c.get("/api/v2/admin/users?order_by=age") + data = (response.json())["users"] + self.assertEqual(response.status_code, 200) + ages = [user["age"] for user in data] + ascending_order = all(a <= b for a, b in zip(ages, ages[1:])) + self.assertTrue(ascending_order) + + response = c.get("/api/v2/admin/users?order_by=-age") + data = (response.json())["users"] + self.assertEqual(ages, [user["age"] for user in data[::-1]]) + + def test_get_users_by_zip(self): + c = Client() + # when unauthenticated, returns status code 401 + response = c.get("/api/v2/admin/users/zip") + self.assertEqual(response.status_code, 401) + + # authenticated + self.assertTrue(Login.login(c)) + + response = c.get(f"/api/v2/admin/users/zip") + 
data = response.json() + self.assertEqual( + data, + { + "total": {"94102": 1, "94103": 2, "94104": 2}, + }, + ) + + response = c.get( + f"/api/v2/admin/users/zip?contest_id={self.contest0_id}" + ) + data = response.json() + self.assertEqual( + data, + { + "total": {"94103": 2, "94104": 2}, + "new": {"94103": 1, "94104": 2}, + }, + ) + + def test_get_users_active_by_zip(self): + c = Client() + # when unauthenticated, returns status code 401 + response = c.get( + f"/api/v2/admin/users/zip/active?contest_id={self.contest0_id}" + ) + self.assertEqual(response.status_code, 401) + + # authenticated + self.assertTrue(Login.login(c)) + + # no contest_id given + response = c.get(f"/api/v2/admin/users/zip/active") + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"], + "Cannot find contest_id None", + msg=fail_message, + ) + + response = c.get( + f"/api/v2/admin/users/zip/active?contest_id={self.contest0_id}" + ) + data = response.json() + self.assertEqual( + data, + { + "total": {"94103": 2, "94104": 1}, + "new": {"94103": 1, "94104": 1}, + }, + ) + + def test_get_users_median_steps_by_zip(self): + c = Client() + # when unauthenticated, returns status code 401 + response = c.get( + f"/api/v2/admin/users/zip/steps?contest_id={self.contest0_id}" + ) + self.assertEqual(response.status_code, 401) + + # authenticated + self.assertTrue(Login.login(c)) + + # no contest_id given + response = c.get(f"/api/v2/admin/users/zip/steps") + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"], + "Cannot find contest_id None", + msg=fail_message, + ) + + response = c.get( + f"/api/v2/admin/users/zip/steps?contest_id={self.contest0_id}" + ) + data = response.json() + self.assertEqual( + data, + { + "all": 87500.0, + "94103": 87500.0, + }, # median of [70k, 105k] = avg of the two = 87.5k + ) diff --git a/home/tests/integration/apiv2/views/api/test_export.py b/home/tests/integration/apiv2/views/api/test_export.py new file mode 100644 index 00000000..a0255ee7 --- /dev/null +++ b/home/tests/integration/apiv2/views/api/test_export.py @@ -0,0 +1,92 @@ +import csv +import io +import logging +from datetime import date, timedelta + +from django.test import Client, TestCase + +from home.views.api.export import CSV_COLUMNS + +from .utils import Login, generate_test_data + +logger = logging.getLogger(__name__) + + +class TestExportViews(TestCase): + contest0_id = None + + @classmethod + def setUpTestData(cls): + cls.contest0_id = generate_test_data() + + def test_export_users(self): + c = Client() + self.assertTrue(Login.login(c)) + + response = c.get(f"/api/v2/export/users?contest_id={self.contest0_id}") + self.assertEqual(response.status_code, 200) + self.assertEqual("text/csv", response["Content-Type"]) + + content = response.getvalue().decode("utf-8") + reader = csv.DictReader(io.StringIO(content)) + headers = reader.fieldnames + self.assertEqual( + headers, + [col["name"] for col in CSV_COLUMNS] + + [ + str(date(3000, 2, 28) + timedelta(days=dt)) for dt in range(15) + ], + ) + + rows = list(reader) + self.assertEqual(len(rows), 4) + self.assertEqual(rows[0]["Participant Name"], "User 2") + self.assertEqual(rows[0]["Is New Signup"], "False") + self.assertEqual(rows[0]["Active During Contest"], "True") + 
self.assertEqual(rows[0]["Total Daily Walks During Contest"], "7") + self.assertEqual(rows[0]["Total Steps During Contest"], "70000") + self.assertEqual(rows[0]["Total Recorded Walks During Contest"], "2") + self.assertEqual( + rows[0]["Total Recorded Steps During Contest"], "4000" + ) + + self.assertEqual(rows[1]["Participant Name"], "User 3") + self.assertEqual(rows[1]["Is New Signup"], "True") + self.assertEqual(rows[1]["Active During Contest"], "True") + self.assertEqual(rows[1]["Total Daily Walks During Contest"], "7") + self.assertEqual(rows[1]["Total Steps During Contest"], "105000") + self.assertEqual(rows[1]["Total Recorded Walks During Contest"], "0") + self.assertEqual(rows[1]["Total Recorded Steps During Contest"], "") + + self.assertEqual(rows[2]["Participant Name"], "User 4") + self.assertEqual(rows[2]["Is New Signup"], "True") + self.assertEqual(rows[2]["Active During Contest"], "True") + self.assertEqual(rows[2]["Total Daily Walks During Contest"], "0") + self.assertEqual(rows[2]["Total Steps During Contest"], "") + self.assertEqual(rows[2]["Total Recorded Walks During Contest"], "2") + self.assertEqual( + rows[2]["Total Recorded Steps During Contest"], "6000" + ) + + self.assertEqual(rows[3]["Participant Name"], "User 5") + self.assertEqual(rows[3]["Is New Signup"], "True") + self.assertEqual(rows[3]["Active During Contest"], "False") + self.assertEqual(rows[3]["Total Daily Walks During Contest"], "0") + self.assertEqual(rows[3]["Total Steps During Contest"], "") + self.assertEqual(rows[3]["Total Recorded Walks During Contest"], "0") + self.assertEqual(rows[3]["Total Recorded Steps During Contest"], "") + + def test_export_users_missing_contest_id(self): + c = Client() + self.assertTrue(Login.login(c)) + + response = c.get("/api/v2/export/users?contest_id=") + # Check for a successful response by the server + self.assertEqual(response.status_code, 422) + + def test_export_users_unauthenticated(self): + c = Client() + + response = c.get(f"/api/v2/export/users?contest_id={self.contest0_id}") + # Check for a successful response by the server + self.assertEqual(response.status_code, 401) diff --git a/home/tests/integration/apiv2/views/api/utils.py b/home/tests/integration/apiv2/views/api/utils.py new file mode 100644 index 00000000..be57b428 --- /dev/null +++ b/home/tests/integration/apiv2/views/api/utils.py @@ -0,0 +1,126 @@ +import math +from datetime import date, datetime, timedelta + +from django.contrib.auth.models import User +from django.test import Client +from django.utils import timezone +from freezegun import freeze_time +from pytz import utc + +from home.utils.generators import ( + AccountGenerator, + ContestGenerator, + DailyWalkGenerator, + DeviceGenerator, + IntentionalWalkGenerator, +) + + +class Login: + username = "testadmin" + password = "test*PW" + + def __init__(self): + User.objects.create_superuser( + username=self.username, password=self.password + ) + + @classmethod + def login(cls, client: Client): + return client.login(username=cls.username, password=cls.password) + + +def generate_test_data(): + # Create user login + Login() + + # Accounts generated for testing + # account0: 94102 signup before current contest, not part of current contest + # account1: 94102 signup before current contest, tester, dailywalks 5k steps/day, intentionalwalks + # account2: 94103 signup before current contest, dailywalks 10k steps/day, intentionalwalks + # account3: 94103 signup during current contest, dailywalks 15k steps/day + # account4: 94104 signup during current 
contest, intentionalwalks
+    # account5: 94104 signup during current contest, inactive
+
+    # Generate 3 accounts before the current contest
+    accounts = list(AccountGenerator().generate(3))
+    # Make second account a tester
+    accounts[1].is_tester = True
+    accounts[1].save()
+    # Generate 3 accounts during the contest
+    with freeze_time("3000-03-02 12:00:00"):
+        accounts = accounts + list(AccountGenerator().generate(3))
+    # Set names for testing ordering, zip codes for grouping
+    for i, account in enumerate(accounts):
+        account.name = f"User {i}"
+        account.zip = f"{94102 + math.floor(i / 2)}"
+        account.save()
+
+    # generate devices for the active accounts
+    device1 = list(DeviceGenerator(accounts[1:2]).generate(1))
+    device2 = list(DeviceGenerator(accounts[2:3]).generate(1))
+    device3 = list(DeviceGenerator(accounts[3:4]).generate(1))
+    device4 = list(DeviceGenerator(accounts[4:5]).generate(1))
+
+    # Generate the "current" Contest we want to test with
+    params = {
+        "start_baseline": date(3000, 2, 28),
+        "start_promo": date(3000, 3, 1),
+        "start": date(3000, 3, 7),
+        "end": date(3000, 3, 14),
+    }
+    contest0 = next(ContestGenerator().generate(1, **params))
+
+    # Add the last five accounts to this contest
+    for account in accounts[1:6]:
+        account.contests.add(contest0)
+
+    # Generate daily walks (14 per device, one per day)
+    dwalks1 = DailyWalkGenerator(device1)
+    dwalks2 = DailyWalkGenerator(device2)
+    dwalks3 = DailyWalkGenerator(device3)
+    tz = timezone.get_default_timezone()
+    for dt in range(14):
+        # Set dates on walks to 3000-02-28 through 3000-03-13
+        t = datetime(3000, 2, 28, 10, 0).astimezone(tz) + timedelta(days=dt)
+        next(dwalks1.generate(1, date=t, steps=5000, distance=4000))
+        next(dwalks2.generate(1, date=t, steps=10000, distance=8000))
+        next(dwalks3.generate(1, date=t, steps=15000, distance=12000))
+
+    # Generate intentional walks (5, every other day)
+    iwalks1 = IntentionalWalkGenerator(device1)
+    iwalks2 = IntentionalWalkGenerator(device2)
+    iwalks4 = IntentionalWalkGenerator(device4)
+    for dt in range(5):
+        # Set dates on walks to [2, 4, 6, 8, 10] (3000-03)
+        t = utc.localize(datetime(3000, 3, 2, 10, 0)) + timedelta(
+            days=(dt * 2)
+        )
+        next(
+            iwalks1.generate(
+                1, start=t, end=(t + timedelta(hours=2)), steps=1000
+            )
+        )
+        next(
+            iwalks2.generate(
+                1, start=t, end=(t + timedelta(hours=2)), steps=2000
+            )
+        )
+        next(
+            iwalks4.generate(
+                1, start=t, end=(t + timedelta(hours=2)), steps=3000
+            )
+        )
+
+    # create an "old" Contest, make the first account part of it
+    params = {
+        "start_baseline": date(2999, 2, 28),
+        "start_promo": date(2999, 3, 1),
+        "start": date(2999, 3, 7),
+        "end": date(2999, 3, 14),
+    }
+    contest1 = next(ContestGenerator().generate(1, **params))
+    accounts[0].contests.add(contest1)
+
+    # return the "current" contest id
+    return str(contest0.pk)
diff --git a/home/tests/integration/apiv2/views/web/__init__.py b/home/tests/integration/apiv2/views/web/__init__.py
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/home/tests/integration/apiv2/views/web/__init__.py
@@ -0,0 +1 @@
+
diff --git a/home/tests/integration/apiv2/views/web/test_data.py b/home/tests/integration/apiv2/views/web/test_data.py
new file mode 100644
index 00000000..239c6d77
--- /dev/null
+++ b/home/tests/integration/apiv2/views/web/test_data.py
@@ -0,0 +1,260 @@
+import csv
+import io
+from collections import defaultdict
+from datetime import date, datetime, timedelta
+from typing import Dict, List
+
+from django.contrib.auth.models import User
+from django.test import Client, TestCase
+from
freezegun import freeze_time +from pytz import utc + +from home.utils.generators import ( + AccountGenerator, + ContestGenerator, + DailyWalkGenerator, + DeviceGenerator, + IntentionalWalkGenerator, +) +from home.views.web.data import USER_AGG_CSV_BASE_HEADER + + +class Login: + username = "testadmin" + password = "test*PW" + + def __init__(self): + User.objects.create_user( + username=self.username, password=self.password + ) + + +class TestCsvViews(TestCase): + @staticmethod + def login(client: Client): + return client.login(username=Login.username, password=Login.password) + + @classmethod + def setUpTestData(cls): + # Create user login + Login() + + # Generate fake accounts + accounts = list(AccountGenerator().generate(2)) + # Device associated with accounts[0] + device0 = list(DeviceGenerator(accounts[0:1]).generate(1)) + # Device associated with accounts[1] + device1 = list(DeviceGenerator(accounts[1:2]).generate(1)) + + # Generate daily walks (10 per device) + dwalks0 = DailyWalkGenerator(device0) + dwalks1 = DailyWalkGenerator(device1) + for dt in range(10): + # Set dates on walks to 3000-03-01 to 3000-03-10 + t = utc.localize(datetime(3000, 3, 1, 10, 0)) + timedelta(days=dt) + next(dwalks0.generate(1, date=t)) + next(dwalks1.generate(1, date=t)) + + # Generate intentional walks (5, every other day) + iwalks0 = IntentionalWalkGenerator(device0) + iwalks1 = IntentionalWalkGenerator(device1) + for dt in range(5): + # Set dates on walks to [2, 4, 6, 8, 10] (3000-03) + t = utc.localize(datetime(3000, 3, 2, 10, 0)) + timedelta( + days=(dt * 2) + ) + next(iwalks0.generate(1, start=t, end=(t + timedelta(hours=2)))) + next(iwalks1.generate(1, start=t, end=(t + timedelta(hours=2)))) + + @staticmethod + def date_from_timestamp(ts: str) -> datetime.date: + return datetime.fromisoformat(ts).date() + + @staticmethod + def group_by(rows: List[Dict], key: str) -> Dict[str, List]: + grouped = defaultdict(list) + for row in rows: + grouped[row[key]].append(row) + return grouped + + def test_user_agg_csv_view(self): + c = Client() + self.assertTrue(self.login(c)) + + with freeze_time("3000-03-02"): + acct_params = { + "email": "custom@gmail.com", + "race": ["BL", "OT"], + "race_other": "Arab", + "gender": "OT", + "gender_other": "Gender Queer", + "sexual_orien": "OT", + "sexual_orien_other": "Pansexual", + } + next(AccountGenerator().generate(1, **acct_params)) + + params = { + "start_baseline": date(3000, 2, 28), + "start_promo": date(3000, 3, 1), + "start": date(3000, 3, 7), + "end": date(3000, 3, 14), + } + contest_id = next(ContestGenerator().generate(1, **params)).pk + + response = c.get( + "/data/users_agg.csv", + { + "contest_id": contest_id, + }, + ) + + self.assertEqual(200, response.status_code) + self.assertEqual("text/csv", response["Content-Type"]) + content = response.content.decode("utf-8") + reader = csv.DictReader(io.StringIO(content)) + + headers = reader.fieldnames + expected_headers = USER_AGG_CSV_BASE_HEADER[:] + [ + str(date(3000, 2, 28) + timedelta(days=dt)) for dt in range(15) + ] + self.assertEqual(headers, expected_headers) + + rows = list(reader) + + self.assertEqual(3, len(rows)) + + rows_by_email = {r["Email"]: r for r in rows} + for email, row in rows_by_email.items(): + expected_row = row.copy() + + if email == "custom@gmail.com": + expected_row.update( + { + "Email": "custom@gmail.com", + "Race Other": "Arab", + "Gender Identity": "OT", + "Gender Identity Other": "Gender Queer", + "Sexual Orientation": "OT", + "Sexual Orientation Other": "Pansexual", + "Is New 
Signup": "yes", + "Active During Contest": "no", + } + ) + expected_row.update( + {k: "" for k in expected_headers[14:]} + ) # Empty walk data + self.assertEqual(row, expected_row) + self.assertIn( + row["Race"], {"['OT', 'BL']", "['BL', 'OT']"} + ) # order is non-deterministic + + else: + expected_row.update( + { + "Active During Contest": "yes", + # Daily walks on 3-7 through 3-10 + "Total Daily Walks During Contest": "4", + # Daily walks on 3-1 through 3-6 (before contest start) + "Total Daily Walks During Baseline": "6", + # Intentional walks on 3-8, 3-10 + "Total Recorded Walks During Contest": "2", + # Intentional walks on 3-2, 3-4, 3-6 + "Total Recorded Walks During Baseline": "3", + "3000-02-28": "", + "3000-03-11": "", + "3000-03-12": "", + "3000-03-13": "", + "3000-03-14": "", + } + ) + + walk_days = [ + date(3000, 3, 1) + timedelta(days=d) for d in range(10) + ] # generated daily walks + for walk_day in walk_days: + self.assertIsNot( + row[str(walk_day)], "" + ) # Some amt of steps on these days in contest/baseline + + for col_name in [ + "Total Steps During Contest", + "Total Steps During Baseline", + "Total Recorded Steps During Contest", + "Total Recorded Steps During Baseline", + "Total Recorded Walk Time During Contest", + "Total Recorded Walk Time During Baseline", + ]: + self.assertIsNot( + row[col_name], "" + ) # Should be populated from generated dw/iw data + + # Test csv response of daily walks + def test_daily_walks_csv_view(self): + c = Client() + self.assertTrue(self.login(c)) + start_date = date(3000, 3, 7) + end_date = date(3000, 3, 14) + response = c.get( + "/data/daily_walks.csv", + { + "start_date": start_date.isoformat(), + "end_date": end_date.isoformat(), + }, + ) + + self.assertEqual(200, response.status_code) + self.assertEqual("text/csv", response["Content-Type"]) + content = response.content.decode("utf-8") + reader = csv.DictReader(io.StringIO(content)) + rows = list(reader) + headers = reader.fieldnames + self.assertIn("date", headers) + self.assertIn("steps", headers) + + grouped_rows = self.group_by(rows, "email") + for user, walks in grouped_rows.items(): + self.assertEqual(4, len(walks)) # [7, 8, 9, 10] + + for walk in walks: + self.assertGreaterEqual( + date.fromisoformat(walk["date"]), start_date + ) + self.assertLessEqual( + date.fromisoformat(walk["date"]), end_date + ) + + # Test csv response of intentional (recorded) walks + def test_intentional_walks_csv_view(self): + c = Client() + self.assertTrue(self.login(c)) + start_date = date(3000, 3, 7) + end_date = date(3000, 3, 14) + response = c.get( + "/data/intentional_walks.csv", + { + "start_date": start_date.isoformat(), + "end_date": end_date.isoformat(), + }, + ) + + self.assertEqual("text/csv", response["Content-Type"]) + self.assertEqual(200, response.status_code) + content = response.content.decode("utf-8") + reader = csv.DictReader(io.StringIO(content)) + rows = list(reader) + headers = reader.fieldnames + self.assertIn("event_id", headers) + self.assertIn("steps", headers) + + grouped_rows = self.group_by(rows, "email") + + for user, walks in grouped_rows.items(): + self.assertEqual(2, len(walks)) # [8, 10] + + for walk in walks: + self.assertGreaterEqual( + self.date_from_timestamp(walk["start_time"]), start_date + ) + self.assertLessEqual( + self.date_from_timestamp(walk["end_time"]), end_date + ) diff --git a/home/tests/integration/apiv2/weeklygoal/__init__.py b/home/tests/integration/apiv2/weeklygoal/__init__.py new file mode 100644 index 00000000..e69de29b diff --git 
a/home/tests/integration/apiv2/weeklygoal/test_create.py b/home/tests/integration/apiv2/weeklygoal/test_create.py new file mode 100644 index 00000000..b1303757 --- /dev/null +++ b/home/tests/integration/apiv2/weeklygoal/test_create.py @@ -0,0 +1,264 @@ +from django.test import Client, TestCase + +from home.models import Device + + +class ApiTestCase(TestCase): + def setUp(self): + # Test client + self.client = Client() + + self.account_id = "12345" + + # Create a user + response = self.client.post( + path="/api/v2/appuser", + data={ + "name": "Abhay Kashyap", + "email": "abhay@blah.com", + "zip": "94102", + "age": 99, + "account_id": self.account_id, + }, + content_type="application/json", + ) + + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # device = Device.objects.get(device_id=self.account_id) + # self.account = device.account + + # Details for Weekly Goal creation + self.url = "/api/v2/weeklygoal" + self.start_of_week = "2023-08-21" + self.start_of_week_param = "2023-08-23" + # Request parameters + self.request_params = { + "account_id": self.account_id, + "weekly_goal": { + "start_of_week": self.start_of_week_param, + "steps": 2000, + "days": 3, + }, + } + # Content type + self.content_type = "application/json" + + # Test a successful creation of a weekly goal + def test_create_weeklygoal(self): + + # Send the request + response = self.client.post( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["weekly_goal"]["start_of_week"], + self.start_of_week, + msg=fail_message, + ) + self.assertEqual( + response_data["weekly_goal"]["steps"], + self.request_params["weekly_goal"]["steps"], + msg=fail_message, + ) + self.assertEqual( + response_data["weekly_goal"]["days"], + self.request_params["weekly_goal"]["days"], + msg=fail_message, + ) + + # Test a successful update of a weekly goal where a weekly goal already exists + def test_update_weeklygoal(self): + + # Send the request + response = self.client.post( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["weekly_goal"]["start_of_week"], + self.start_of_week, + msg=fail_message, + ) + self.assertEqual( + response_data["weekly_goal"]["steps"], + self.request_params["weekly_goal"]["steps"], + msg=fail_message, + ) + self.assertEqual( + response_data["weekly_goal"]["days"], + self.request_params["weekly_goal"]["days"], + msg=fail_message, + ) + + # change weekly_goal values + self.request_params["weekly_goal"]["start_of_week"] = "2023-08-25" + self.request_params["weekly_goal"]["steps"] = 50000 + self.request_params["weekly_goal"]["days"] = 7 + + # Send the request + response = self.client.post( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["weekly_goal"]["start_of_week"], 
+            self.start_of_week,
+            msg=fail_message,
+        )
+        self.assertEqual(
+            response_data["weekly_goal"]["steps"],
+            self.request_params["weekly_goal"]["steps"],
+            msg=fail_message,
+        )
+        self.assertEqual(
+            response_data["weekly_goal"]["days"],
+            self.request_params["weekly_goal"]["days"],
+            msg=fail_message,
+        )
+
+    # Test creation of a weekly goal with an invalid user account
+    def test_create_weeklygoal_invalid_account(self):
+        self.request_params["account_id"] = "0000000"
+
+        # Send the request
+        response = self.client.post(
+            path=self.url,
+            data=self.request_params,
+            content_type=self.content_type,
+        )
+        # Check that the server responds with 404 Not Found
+        self.assertEqual(response.status_code, 404)
+        # Parse the response
+        response_data = response.json()
+        fail_message = f"Server response - {response_data}"
+        self.assertEqual(
+            response_data["detail"],
+            "Unregistered device - "
+            f'{self.request_params["account_id"]}.'
+            " Please register first!",
+            msg=fail_message,
+        )
+
+    # Test creation of a weekly goal with the weekly_goal field missing
+    def test_create_weeklygoal_without_weeklygoal(self):
+
+        del self.request_params["weekly_goal"]
+
+        # Send the request
+        response = self.client.post(
+            path=self.url,
+            data=self.request_params,
+            content_type=self.content_type,
+        )
+        # Check that the server responds with 422 Unprocessable Entity
+        self.assertEqual(response.status_code, 422)
+        # Parse the response
+        response_data = response.json()
+        fail_message = f"Server response - {response_data}"
+        self.assertEqual(
+            response_data["detail"][0]["type"], "missing", msg=fail_message
+        )
+        self.assertIn(
+            "weekly_goal", response_data["detail"][0]["loc"], msg=fail_message
+        )
+        self.assertEqual(
+            response_data["detail"][0]["msg"],
+            "Field required",
+            msg=fail_message,
+        )
+
+    # Test creation of a weekly goal with the weekly_goal steps field missing
+    def test_create_weeklygoal_without_weeklygoal_steps(self):
+
+        del self.request_params["weekly_goal"]["steps"]
+
+        # Send the request
+        response = self.client.post(
+            path=self.url,
+            data=self.request_params,
+            content_type=self.content_type,
+        )
+        # Check that the server responds with 422 Unprocessable Entity
+        self.assertEqual(response.status_code, 422)
+        # Parse the response
+        response_data = response.json()
+        fail_message = f"Server response - {response_data}"
+        self.assertEqual(
+            response_data["detail"][0]["type"], "missing", msg=fail_message
+        )
+        self.assertIn(
+            "steps", response_data["detail"][0]["loc"], msg=fail_message
+        )
+        self.assertEqual(
+            response_data["detail"][0]["msg"],
+            "Field required",
+            msg=fail_message,
+        )
+
+    # Test invalid methods
+    def test_create_weeklygoal_invalid_methods(self):
+        # Test not allowed patch method
+        response = self.client.patch(
+            path=self.url,
+            data=self.request_params,
+            content_type=self.content_type,
+        )
+        # Check that the server responds with 405 Method Not Allowed
+        self.assertEqual(response.status_code, 405)
+        # Parse the response
+        response_data = response.content
+        fail_message = f"Server response - {response_data}"
+        self.assertEqual(
+            response_data, b"Method not allowed", msg=fail_message
+        )
+
+        # Test not allowed put method
+        response = self.client.put(
+            path=self.url,
+            data=self.request_params,
+            content_type=self.content_type,
+        )
+        # Check that the server responds with 405 Method Not Allowed
+        self.assertEqual(response.status_code, 405)
+        # Parse the response
+        response_data = response.content
+        fail_message = f"Server response - {response_data}"
+        self.assertEqual(
+            response_data, b"Method not allowed", msg=fail_message
+        )
+
+        # Test not allowed delete method
+        response = self.client.delete(
+            path=self.url,
+            data=self.request_params,
+            content_type=self.content_type,
+        )
+        # Check that the server responds with 405 Method Not Allowed
+        self.assertEqual(response.status_code, 405)
+        # Parse the response
+        response_data = response.content
+        fail_message = f"Server response - {response_data}"
+        self.assertEqual(
+            response_data, b"Method not allowed", msg=fail_message
+        )
diff --git a/home/tests/integration/apiv2/weeklygoal/test_get.py b/home/tests/integration/apiv2/weeklygoal/test_get.py
new file mode 100644
index 00000000..c7016aef
--- /dev/null
+++ b/home/tests/integration/apiv2/weeklygoal/test_get.py
@@ -0,0 +1,198 @@
+from django.forms.models import model_to_dict
+from django.test import Client, TestCase
+
+from home.models import Device, WeeklyGoal
+
+
+class ApiTestCase(TestCase):
+    def setUp(self):
+        # Test client
+        self.client = Client()
+
+        self.email = "abhay@blah.com"
+        self.account_id = "12345"
+
+        # Create a user
+        response = self.client.post(
+            path="/api/v2/appuser",
+            data={
+                "name": "Abhay Kashyap",
+                "email": self.email,
+                "zip": "94102",
+                "age": 99,
+                "account_id": self.account_id,
+            },
+            content_type="application/json",
+        )
+
+        # Check for a successful response by the server
+        self.assertEqual(response.status_code, 201)
+
+        device = Device.objects.get(device_id=self.account_id)
+        self.account = device.account
+
+        # Define weekly goals
+        self.weekly_goals = [
+            WeeklyGoal(
+                account=self.account,
+                start_of_week="2023-08-21",
+                steps=2000,
+                days=3,
+            ),
+            WeeklyGoal(
+                account=self.account,
+                start_of_week="2023-08-27",
+                steps=2500,
+                days=4,
+            ),
+            WeeklyGoal(
+                account=self.account,
+                start_of_week="2023-09-04",
+                steps=3000,
+                days=5,
+            ),
+        ]
+
+        # Create weekly goals
+        response = WeeklyGoal.objects.bulk_create(self.weekly_goals)
+
+        i = 0
+        # Verify that the goals were created as defined
+        for item in response:
+            self.assertEqual(item, self.weekly_goals[i])
+            i += 1
+
+        # Details for the weekly goal list view
+        self.url = "/api/v2/weeklygoal"
+        # Request parameters
+        self.request_params = {"account_id": self.account_id}
+
+    # Test a successful request for weekly goals
+    def test_weeklygoal_get_success(self):
+        # Send the request
+        response = self.client.get(
+            path=f"{self.url}/{self.request_params['account_id']}"
+        )
+        # Check for a successful response by the server
+        self.assertEqual(response.status_code, 200, msg=f"{response.content}")
+        # Parse the response
+        response_data = response.json()
+        fail_message = f"Server response - {response_data}"
+        self.assertIn("weekly_goals", response_data, msg=fail_message)
+        i = 2
+        for goal in response_data["weekly_goals"]:
+            goalDict = model_to_dict(self.weekly_goals[i])
+            goalDict["account_id"] = goalDict.get("account")
+            del goalDict["account"]
+            self.assertEqual(goal, goalDict)
+            i -= 1
+
+    # Test getting weekly goals from an account that doesn't exist
+    def test_weeklygoal_get_failure_invalid_account(self):
+        self.request_params["account_id"] = "0000000"
+
+        # Send the request
+        response = self.client.get(
+            path=f"{self.url}/{self.request_params['account_id']}"
+        )
+        # Check that the server responds with 404 Not Found
+        self.assertEqual(response.status_code, 404)
+        # Parse the response
+        response_data = response.json()
+        fail_message = f"Server response - {response_data}"
+        self.assertEqual(
+            response_data["detail"],
+            "Unregistered account - "
+            f'{self.request_params["account_id"]}.'
+ " Please register first!", + msg=fail_message, + ) + + # Test getting weekly goals with missing account id param + def test_weeklygoal_get_failure_missing_account_id(self): + del self.request_params["account_id"] + + # Send the request + response = self.client.get( + path=f"{self.url}/", + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.content + fail_message = f"Server response - {response_data}" + self.assertIn(b"Not Found", response_data, msg=fail_message) + + # # Test invalid methods + # def test_weeklygoal_get_invalid_methods(self): + # # Test not allowed get method + # response = self.client.get( + # path=self.url, + # data=self.request_params, + # content_type=self.content_type, + # ) + # # Check for a successful response by the server + # self.assertEqual(response.status_code, 200) + # # Parse the response + # response_data = response.json() + # fail_message = f"Server response - {response_data}" + # self.assertEqual(response_data["status"], "error", msg=fail_message) + # self.assertEqual( + # response_data["message"], + # "Method not allowed!", + # msg=fail_message, + # ) + + # # Test not allowed patch method + # response = self.client.patch( + # path=self.url, + # data=self.request_params, + # content_type=self.content_type, + # ) + # # Check for a successful response by the server + # self.assertEqual(response.status_code, 200) + # # Parse the response + # response_data = response.json() + # fail_message = f"Server response - {response_data}" + # self.assertEqual(response_data["status"], "error", msg=fail_message) + # self.assertEqual( + # response_data["message"], + # "Method not allowed!", + # msg=fail_message, + # ) + + # # Test not allowed put method + # response = self.client.put( + # path=self.url, + # data=self.request_params, + # content_type=self.content_type, + # ) + # # Check for a successful response by the server + # self.assertEqual(response.status_code, 200) + # # Parse the response + # response_data = response.json() + # fail_message = f"Server response - {response_data}" + # self.assertEqual(response_data["status"], "error", msg=fail_message) + # self.assertEqual( + # response_data["message"], + # "Method not allowed!", + # msg=fail_message, + # ) + + # # Test not allowed delete method + # response = self.client.delete( + # path=self.url, + # data=self.request_params, + # content_type=self.content_type, + # ) + # # Check for a successful response by the server + # self.assertEqual(response.status_code, 200) + # # Parse the response + # response_data = response.json() + # fail_message = f"Server response - {response_data}" + # self.assertEqual(response_data["status"], "error", msg=fail_message) + # self.assertEqual( + # response_data["message"], + # "Method not allowed!", + # msg=fail_message, + # ) diff --git a/home/tests/integration/appuser/__init__.py b/home/tests/integration/appuser/__init__.py deleted file mode 100644 index f70219f3..00000000 --- a/home/tests/integration/appuser/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# home.tests.integration.appuser diff --git a/home/tests/integration/dailywalk/__init__.py b/home/tests/integration/dailywalk/__init__.py deleted file mode 100644 index 0aff0cf4..00000000 --- a/home/tests/integration/dailywalk/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# home.tests.integration.dailywalk diff --git a/home/tests/integration/intentionalwalk/__init__.py b/home/tests/integration/intentionalwalk/__init__.py deleted file mode 100644 
index fca75664..00000000 --- a/home/tests/integration/intentionalwalk/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# home.tests.integration.intentionalwalk diff --git a/home/tests/integration/weeklygoal/__init__.py b/home/tests/integration/weeklygoal/__init__.py deleted file mode 100644 index 0225e5d0..00000000 --- a/home/tests/integration/weeklygoal/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# home.tests.integration.weeklygoal diff --git a/home/tests/unit/apiv1/__init__.py b/home/tests/unit/apiv1/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/home/tests/unit/apiv1/api/__init__.py b/home/tests/unit/apiv1/api/__init__.py new file mode 100644 index 00000000..917000b9 --- /dev/null +++ b/home/tests/unit/apiv1/api/__init__.py @@ -0,0 +1 @@ +# home.tests diff --git a/home/tests/unit/api/test_appuser.py b/home/tests/unit/apiv1/api/test_appuser.py similarity index 100% rename from home/tests/unit/api/test_appuser.py rename to home/tests/unit/apiv1/api/test_appuser.py diff --git a/home/tests/unit/api/test_leaderboard.py b/home/tests/unit/apiv1/api/test_leaderboard.py similarity index 100% rename from home/tests/unit/api/test_leaderboard.py rename to home/tests/unit/apiv1/api/test_leaderboard.py diff --git a/home/tests/unit/test_contest.py b/home/tests/unit/apiv1/test_contest.py similarity index 100% rename from home/tests/unit/test_contest.py rename to home/tests/unit/apiv1/test_contest.py diff --git a/home/tests/unit/test_dates.py b/home/tests/unit/apiv1/test_dates.py similarity index 100% rename from home/tests/unit/test_dates.py rename to home/tests/unit/apiv1/test_dates.py diff --git a/home/tests/unit/test_histogram.py b/home/tests/unit/apiv1/test_histogram.py similarity index 99% rename from home/tests/unit/test_histogram.py rename to home/tests/unit/apiv1/test_histogram.py index b07cc734..fd0edc0c 100644 --- a/home/tests/unit/test_histogram.py +++ b/home/tests/unit/apiv1/test_histogram.py @@ -7,7 +7,7 @@ from home.models.dailywalk import DailyWalk from home.models.intentionalwalk import IntentionalWalk from home.models.leaderboard import Leaderboard -from home.tests.integration.views.api.utils import generate_test_data +from home.tests.integration.apiv1.views.api.utils import generate_test_data from home.views.api.histogram.serializers import HistogramReqSerializer diff --git a/home/tests/unit/test_user.py b/home/tests/unit/apiv1/test_user.py similarity index 100% rename from home/tests/unit/test_user.py rename to home/tests/unit/apiv1/test_user.py diff --git a/home/tests/unit/test_utils.py b/home/tests/unit/apiv1/test_utils.py similarity index 86% rename from home/tests/unit/test_utils.py rename to home/tests/unit/apiv1/test_utils.py index 5af612c6..7fe023db 100644 --- a/home/tests/unit/test_utils.py +++ b/home/tests/unit/apiv1/test_utils.py @@ -1,10 +1,10 @@ -from home.tests.integration.views.api.utils import Login -from home.views.api.utils import require_authn -from django.contrib.auth.models import User, AnonymousUser - -from django.test import TestCase, RequestFactory -from django.views import View +from django.contrib.auth.models import AnonymousUser, User from django.http import HttpResponse +from django.test import RequestFactory, TestCase +from django.views import View + +from home.tests.integration.apiv1.views.api.utils import Login +from home.views.api.utils import require_authn class TestAuthn(TestCase): diff --git a/home/tests/unit/apiv2/__init__.py b/home/tests/unit/apiv2/__init__.py new file mode 100644 index 00000000..e69de29b diff --git 
a/home/tests/unit/apiv2/api/__init__.py b/home/tests/unit/apiv2/api/__init__.py new file mode 100644 index 00000000..917000b9 --- /dev/null +++ b/home/tests/unit/apiv2/api/__init__.py @@ -0,0 +1 @@ +# home.tests diff --git a/home/tests/unit/apiv2/api/test_appuser.py b/home/tests/unit/apiv2/api/test_appuser.py new file mode 100644 index 00000000..78e55afe --- /dev/null +++ b/home/tests/unit/apiv2/api/test_appuser.py @@ -0,0 +1,66 @@ +from django.test import TestCase + +from home.views.api.appuser import is_tester, validate_account_input + + +class TestIsTester(TestCase): + def test_is_tester(self): + examples = [ + ("Tester A", True), + ("Test B", False), # are we sure this is the behavior we want? + ("tester c", True), + ("Testerosa", False), + ("tester-d", True), + ("Tester_E", True), + ("testrata", False), + ("tester", False), # are we sure this is the behavior we want? + ] + for example, expected in examples: + self.assertEqual( + expected, is_tester(example), f"failed '{example}'" + ) + + +class TestValidateAccountInput(TestCase): + def test_valid_input(self): + examples = [ + dict( + zip="12345", + age=99, + is_latino="YE", + race=["BL"], + gender="TF", + sexual_orien="SG", + ), + dict( + zip="12345", age=99, is_latino="DA", race=["DA"], gender="DA" + ), + dict(is_latino=None, gender=None, race=None), + dict( + gender="OT", + gender_other="other gender", + sexual_orien="OT", + sexual_orien_other="pansexual", + ), + dict(race=["BL", "OT"], race_other="other race"), + dict(), + ] + + for example in examples: + validate_account_input(example) + + def test_invalid_input(self): + examples = [ + dict(name=""), + dict(zip="1234"), + dict(age=0), + dict(is_latino=True), + dict(gender=None, gender_other="other gender"), + dict(sexual_orien=None, sexual_orien_other="idk"), + dict(race=None, race_other="other race"), + dict(gender="NB", gender_other="nonbinary"), + ] + + for example in examples: + with self.assertRaises(AssertionError, msg=example): + validate_account_input(example) diff --git a/home/tests/unit/apiv2/api/test_leaderboard.py b/home/tests/unit/apiv2/api/test_leaderboard.py new file mode 100644 index 00000000..4c38c925 --- /dev/null +++ b/home/tests/unit/apiv2/api/test_leaderboard.py @@ -0,0 +1,508 @@ +import logging +import urllib + +from django.test import Client, TestCase +from freezegun import freeze_time + +from home.models import Contest, Device, Leaderboard +from home.utils.generators import AccountGenerator, DeviceGenerator + +logger = logging.getLogger(__name__) + + +class TestLeaderboard(TestCase): + def setUp(self): + # Test client + self.client = Client() + + # Device ID is passed to the API as "account_id", + self.device_id = "12345" + + # Create a user + response = self.client.post( + path="/api/v2/appuser", + data={ + "name": "Abhay Kashyap", + "email": "abhay@blah.com", + "zip": "94102", + "age": 99, + "account_id": self.device_id, + }, + content_type="application/json", + ) + + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # Details for Daily walk event creation + self.url = "/api/v2/dailywalk" + # Request parameters + self.request_params = { + "account_id": "12345", + "daily_walks": [ + {"date": "3000-02-22", "steps": 500, "distance": 1.3} + ], + } + self.bulk_request_params = { + "account_id": "12345", + "daily_walks": [ + {"date": "3000-02-21", "steps": 1500, "distance": 2.1}, + {"date": "3000-02-22", "steps": 500, "distance": 0.8}, + {"date": "3000-02-23", "steps": 1000, "distance": 1.4}, + ], + } + # Content 
type + self.content_type = "application/json" + + # Test a successful creation of a daily walk (within a contest) + # Test autogenerated Leaderboard entry with correct number of steps + + def test_create_dailywalk_and_leaderboard(self): + # Create a contest + contest = Contest() + contest.start_baseline = "3000-01-01" + contest.start_promo = "3000-02-01" + contest.start = "3000-02-01" + contest.end = "3000-02-28" + contest.save() + + # Verify that the user has no contests + acct = Device.objects.get(device_id=self.device_id).account + self.assertFalse(acct.contests.exists()) + + with freeze_time("3000-02-15"): + # Send the request + response = self.client.post( + path=self.url, + data=self.request_params, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + leaderboard_count = Leaderboard.objects.count() + leaderboard_steps_count = Leaderboard.objects.get( + device=self.device_id + ).steps + + # Expected: 1 Leaderboard entry with 500 steps + self.assertEqual(1, leaderboard_count) + self.assertEqual(500, leaderboard_steps_count) + + # Leaderboard Get request test and data validation + # Test that tester account is not included + def test_get_leaderboard(self): + + accounts = list(AccountGenerator().generate(14)) + accounts[-1].is_tester = True + accounts[-1].save() + + contest = Contest() + contest.start_baseline = "3000-01-01" + contest.start_promo = "3000-02-01" + contest.start = "3000-02-01" + contest.end = "3000-02-28" + contest.save() + + # generate devices for the active accounts + device1 = list(DeviceGenerator(accounts[1:2]).generate(1)) + device2 = list(DeviceGenerator(accounts[2:3]).generate(1)) + device3 = list(DeviceGenerator(accounts[3:4]).generate(1)) + device4 = list(DeviceGenerator(accounts[4:5]).generate(1)) + device5 = list(DeviceGenerator(accounts[5:6]).generate(1)) + device6 = list(DeviceGenerator(accounts[6:7]).generate(1)) + device7 = list(DeviceGenerator(accounts[7:8]).generate(1)) + device8 = list(DeviceGenerator(accounts[8:9]).generate(1)) + device9 = list(DeviceGenerator(accounts[9:10]).generate(1)) + device10 = list(DeviceGenerator(accounts[10:11]).generate(1)) + device11 = list(DeviceGenerator(accounts[11:12]).generate(1)) + device12 = list(DeviceGenerator(accounts[12:13]).generate(1)) + device13 = list(DeviceGenerator(accounts[13:14]).generate(1)) + + # dwalks1 = DailyWalkGenerator(device1) + # dwalks2 = DailyWalkGenerator(device2) + # dwalks3 = DailyWalkGenerator(device3) + # dwalks4 = DailyWalkGenerator(device4) + # dwalks5 = DailyWalkGenerator(device5) + # dwalks6 = DailyWalkGenerator(device6) + # dwalks7 = DailyWalkGenerator(device7) + # dwalks8 = DailyWalkGenerator(device8) + # dwalks9 = DailyWalkGenerator(device9) + # dwalks10 = DailyWalkGenerator(device10) + # dwalks11 = DailyWalkGenerator(device11) + # dwalks12 = DailyWalkGenerator(device12) + + # for dt in range(14): + # # Set dates on walks to 3000-02-28 to 3000-03-14 + # t = utc.localize(datetime(3000, 2, 28, 10, 0)) + timedelta(days=dt) + # next(dwalks1.generate(1, date=t, steps=5000, distance=4000)) + # next(dwalks2.generate(1, date=t, steps=10000, distance=8000)) + # next(dwalks3.generate(1, date=t, steps=15000, distance=12000)) + # next(dwalks4.generate(1, date=t, steps=5000, distance=4000)) + # next(dwalks5.generate(1, date=t, steps=10000, distance=8000)) + # next(dwalks6.generate(1, date=t, steps=15000, distance=12000)) + # next(dwalks7.generate(1, date=t, steps=5000, distance=4000)) + # next(dwalks8.generate(1, 
date=t, steps=10000, distance=8000)) + # next(dwalks9.generate(1, date=t, steps=15000, distance=12000)) + # next(dwalks10.generate(1, date=t, steps=5000, distance=4000)) + # next(dwalks11.generate(1, date=t, steps=10000, distance=8000)) + # next(dwalks12.generate(1, date=t, steps=15000, distance=12000)) + + # Create 12 Leadeboard entries + Leaderboard.objects.create( + device=device1[0], + contest=contest, + account=accounts[1:2][0], + steps=5, + ) + Leaderboard.objects.create( + device=device2[0], + contest=contest, + account=accounts[2:3][0], + steps=10, + ) + Leaderboard.objects.create( + device=device3[0], + contest=contest, + account=accounts[3:4][0], + steps=50, + ) + Leaderboard.objects.create( + device=device4[0], + contest=contest, + account=accounts[4:5][0], + steps=200, + ) + Leaderboard.objects.create( + device=device5[0], + contest=contest, + account=accounts[5:6][0], + steps=400, + ) + Leaderboard.objects.create( + device=device6[0], + contest=contest, + account=accounts[6:7][0], + steps=800, + ) + Leaderboard.objects.create( + device=device7[0], + contest=contest, + account=accounts[7:8][0], + steps=1200, + ) + Leaderboard.objects.create( + device=device8[0], + contest=contest, + account=accounts[8:9][0], + steps=2000, + ) + Leaderboard.objects.create( + device=device9[0], + contest=contest, + account=accounts[9:10][0], + steps=3000, + ) + Leaderboard.objects.create( + device=device10[0], + contest=contest, + account=accounts[10:11][0], + steps=4000, + ) + Leaderboard.objects.create( + device=device11[0], + contest=contest, + account=accounts[11:12][0], + steps=5000, + ) + Leaderboard.objects.create( + device=device12[0], + contest=contest, + account=accounts[12:13][0], + steps=11000, + ) + # Tester account, shouldn't be in output + Leaderboard.objects.create( + device=device13[0], + contest=contest, + account=accounts[13:14][0], + steps=15000, + ) + + # Test Get request - Leaderboard + self.client = Client() + + data = { + "contest_id": contest.contest_id, + "device_id": device10[0].device_id, + } + query = urllib.parse.urlencode(data) + response = self.client.get("/api/v2/leaderboard/get?" + query) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + response_data = response.json() + + # Validate that tester account ({'account_id': 16, 'steps': 15000} isn't included) + self.assertNotEqual(response_data["leaderboard"][0]["steps"], 15000) + + # Validate 10 Leaderboard entries received as user is in top 10 + self.assertEqual(response.status_code, 200) + listlength = len(response_data["leaderboard"]) + self.assertEqual(listlength, 10) + + data = { + "contest_id": contest.contest_id, + "device_id": device2[0].device_id, + } + query = urllib.parse.urlencode(data) + response = self.client.get("/api/v2/leaderboard/get?" 
+ query) + # Check for a successful response by the server + self.assertEqual(response.status_code, 200) + + response_data_pretest = response.json() + + # Validate 11 Leaderboard entries received as user is not in top 10 + listlength = len(response_data_pretest["leaderboard"]) + self.assertEqual(listlength, 11) + + # Validate that data from before and after the contest period won't update the leaderboard; + # and data during the contest will update the leaderboard + + self.url = "/api/v2/dailywalk" + + # Request parameters + self.request_params_before = { + "account_id": device1[0].device_id, + "daily_walks": [ + {"date": "3000-01-30", "steps": 5000, "distance": 1.3} + ], + } + self.request_params_after = { + "account_id": device2[0].device_id, + "daily_walks": [ + {"date": "3000-03-01", "steps": 5000, "distance": 1.3} + ], + } + + self.request_params_during_fail = { + "account_id": device2[0].device_id, + "daily_walks": [ + {"date": "3000-02-15", "steps": 5000, "distance": 1.3} + ], + } + + # Daily walk added before contest start and after contest end + # Before contest start: + # Content type + self.content_type = "application/json" + + # Send the request + response = self.client.post( + path=self.url, + data=self.request_params_before, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # After contest end: + response = self.client.post( + path=self.url, + data=self.request_params_after, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + + # Check if Leaderboard has changed, + data = { + "contest_id": contest.contest_id, + "device_id": device2[0].device_id, + } + query = urllib.parse.urlencode(data) + response = self.client.get("/api/v2/leaderboard/get?" + query) + + response_data = response.json() + + # Expected: Leaderboard is unchanged as dailywalks are outside contest period + self.assertEqual( + response_data["leaderboard"], + response_data_pretest["leaderboard"], + ) + self.assertEqual(response.status_code, 200) + + with freeze_time("3000-02-15"): + # Dailywalk submitted during contest period + response = self.client.post( + path=self.url, + data=self.request_params_during_fail, + content_type=self.content_type, + ) + # Check for a successful response by the server + self.assertEqual(response.status_code, 201) + # Parse the response + response_data = response.json() + + # Check if Leaderboard changes + data = { + "contest_id": contest.contest_id, + "device_id": device2[0].device_id, + } + query = urllib.parse.urlencode(data) + response = self.client.get("/api/v2/leaderboard/get?" + query) + + response_data = response.json() + # Expected: Leaderboard has changed and is not equal to initial Leaderboard get request. 
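+ # (Illustrative shape only, not asserted here: each entry in
+ # response_data["leaderboard"] looks roughly like {"account_id": <id>, "steps": <steps>},
+ # consistent with the tester-exclusion comment earlier in this test.)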
+ self.assertNotEqual( + response_data["leaderboard"], + response_data_pretest["leaderboard"], + ) + self.assertEqual(response.status_code, 200) + + def test_get_leaderboard_missing_contest_id(self): + # generate active account + accounts = list(AccountGenerator().generate(2)) + # generate a contest + contest = Contest() + contest.start_baseline = "3000-01-01" + contest.start_promo = "3000-02-01" + contest.start = "3000-02-01" + contest.end = "3000-02-28" + contest.save() + # generate devices for the active accounts + device1 = list(DeviceGenerator(accounts[1:2]).generate(1)) + Leaderboard.objects.create( + device=device1[0], + contest=contest, + account=accounts[0], + steps=5, + ) + data = { + "device_id": device1[0].device_id, + } + query = urllib.parse.urlencode(data) + response = self.client.get("/api/v2/leaderboard/get?" + query) + # Check for a 422 response by the server + self.assertEqual(response.status_code, 422) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"][0]["type"], "missing", msg=fail_message + ) + self.assertIn( + "contest_id", response_data["detail"][0]["loc"], msg=fail_message + ) + self.assertEqual( + response_data["detail"][0]["msg"], + "Field required", + msg=fail_message, + ) + + def test_get_leaderboard_invalid_contest_id(self): + # generate active account + accounts = list(AccountGenerator().generate(2)) + # generate a contest + contest = Contest() + contest.start_baseline = "3000-01-01" + contest.start_promo = "3000-02-01" + contest.start = "3000-02-01" + contest.end = "3000-02-28" + contest.save() + # generate devices for the active accounts + device1 = list(DeviceGenerator(accounts[1:2]).generate(1)) + Leaderboard.objects.create( + device=device1[0], + contest=contest, + account=accounts[0], + steps=5, + ) + data = { + "contest_id": "invalid_contest_id", + "device_id": device1[0].device_id, + } + query = urllib.parse.urlencode(data) + response = self.client.get("/api/v2/leaderboard/get?" + query) + # Check for a 404 Not Found response by the server + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"], + "Contest not found", + msg=fail_message, + ) + + def test_get_leaderboard_missing_device_id(self): + # generate active account + accounts = list(AccountGenerator().generate(2)) + # generate a contest + contest = Contest() + contest.start_baseline = "3000-01-01" + contest.start_promo = "3000-02-01" + contest.start = "3000-02-01" + contest.end = "3000-02-28" + contest.save() + # generate devices for the active accounts + device1 = list(DeviceGenerator(accounts[1:2]).generate(1)) + Leaderboard.objects.create( + device=device1[0], + contest=contest, + account=accounts[0], + steps=5, + ) + data = {"contest_id": contest.contest_id} + query = urllib.parse.urlencode(data) + response = self.client.get("/api/v2/leaderboard/get?" 
+ query) + # Check for a 422 response by the server + self.assertEqual(response.status_code, 422) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"][0]["type"], "missing", msg=fail_message + ) + self.assertIn( + "device_id", response_data["detail"][0]["loc"], msg=fail_message + ) + self.assertEqual( + response_data["detail"][0]["msg"], + "Field required", + msg=fail_message, + ) + + def test_get_leaderboard_invalid_device_id(self): + # generate active account + accounts = list(AccountGenerator().generate(2)) + # generate a contest + contest = Contest() + contest.start_baseline = "3000-01-01" + contest.start_promo = "3000-02-01" + contest.start = "3000-02-01" + contest.end = "3000-02-28" + contest.save() + # generate devices for the active accounts + device1 = list(DeviceGenerator(accounts[1:2]).generate(1)) + Leaderboard.objects.create( + device=device1[0], + contest=contest, + account=accounts[0], + steps=5, + ) + data = { + "contest_id": contest.contest_id, + "device_id": "invalid_device_id", + } + query = urllib.parse.urlencode(data) + response = self.client.get("/api/v2/leaderboard/get?" + query) + # Check for a successful response by the server + self.assertEqual(response.status_code, 404) + # Parse the response + response_data = response.json() + fail_message = f"Server response - {response_data}" + self.assertEqual( + response_data["detail"], + f"Unregistered device - {data['device_id']}. Please register first!", + msg=fail_message, + ) diff --git a/home/tests/unit/apiv2/test_contest.py b/home/tests/unit/apiv2/test_contest.py new file mode 100644 index 00000000..6a2fc9a1 --- /dev/null +++ b/home/tests/unit/apiv2/test_contest.py @@ -0,0 +1,251 @@ +from datetime import date + +from django.core.exceptions import ValidationError +from django.test import TestCase +from freezegun import freeze_time +from home.models import Account, Contest + + +class TestContest(TestCase): + + # Test a successful creation of a Contest + def test_create(self): + contest = Contest() + contest.start_baseline = "2020-06-01" + contest.start_promo = "2020-07-01" + contest.start = "2020-07-01" + contest.end = "2020-07-31" + contest.save() + self.assertIsNotNone(contest.pk) + + # Test validation of start/end dates + def test_validates_dates(self): + contest = Contest() + + # Start before baseline => error + contest.start_baseline = "2020-07-01" + contest.start_promo = "2020-06-21" + contest.start = "2020-07-01" + contest.end = "2020-07-31" + with self.assertRaises(ValidationError) as error: + contest.save() + self.assertEqual( + error.exception.message, + "Baseline period must begin before contest start", + ) + + # Start before promo is error + contest.start_baseline = "2020-06-01" + contest.start_promo = "2020-07-07" + contest.start = "2020-07-01" + contest.end = "2020-07-01" + with self.assertRaises(ValidationError) as error: + contest.save() + self.assertEqual( + error.exception.message, + "Promotion must start before or at same time as Start", + ) + + # Start same as end is error + contest.start_baseline = "2020-06-01" + contest.start_promo = "2020-07-01" + contest.start = "2020-07-01" + contest.end = "2020-07-01" + with self.assertRaises(ValidationError) as error: + contest.save() + self.assertEqual( + error.exception.message, "End of contest must be after Start" + ) + + # Start after end is error + contest.start_baseline = "2020-06-01" + contest.start_promo = "2020-07-01" + contest.start = "2020-07-31" 
+ contest.end = "2020-07-01" + with self.assertRaises(ValidationError) as error: + contest.save() + self.assertEqual( + error.exception.message, "End of contest must be after Start" + ) + + # Save a valid contest + contest.start_baseline = "2020-06-01" + contest.start_promo = "2020-06-21" + contest.start = "2020-07-01" + contest.end = "2020-07-31" + contest.save() + self.assertIsNotNone(contest.pk) + + # Editing an existing contest does not raise an error + contest.end = "2020-07-21" + contest.save() + self.assertEqual("2020-07-21", contest.end) + + def test_overlapping_contests(self): + # Save a valid contest + existing_contest = Contest() + existing_contest.start_baseline = None + existing_contest.start_promo = "2020-06-21" + existing_contest.start = "2020-07-01" + existing_contest.end = "2020-07-31" + existing_contest.save() + self.assertIsNotNone(existing_contest.pk) + + # Test validation of start/end dates + contest = Contest() + + # New overlapping contests cause errors + contest.start_promo = "2020-06-01" + contest.start = "2020-06-01" + contest.end = "2020-06-28" + with self.assertRaises(ValidationError) as error: + contest.save() + self.assertEqual( + error.exception.message, "Contest must not overlap another" + ) + + contest.start_promo = "2020-06-01" + contest.start = "2020-06-01" + contest.end = "2020-08-14" + with self.assertRaises(ValidationError) as error: + contest.save() + self.assertEqual( + error.exception.message, "Contest must not overlap another" + ) + + contest.start_promo = "2020-07-01" + contest.start = "2020-07-01" + contest.end = "2020-07-21" + with self.assertRaises(ValidationError) as error: + contest.save() + self.assertEqual( + error.exception.message, "Contest must not overlap another" + ) + + contest.start_promo = "2020-07-07" + contest.start = "2020-07-07" + contest.end = "2020-07-14" + with self.assertRaises(ValidationError) as error: + contest.save() + self.assertEqual( + error.exception.message, "Contest must not overlap another" + ) + + contest.start_promo = "2020-07-14" + contest.start = "2020-07-14" + contest.end = "2020-08-21" + with self.assertRaises(ValidationError) as error: + contest.save() + self.assertEqual( + error.exception.message, "Contest must not overlap another" + ) + + # It IS okay if the baseline or promo date occurs during a different + # (previous) contest. 
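+ # For example (mirroring the values set just below): a contest running
+ # 2020-08-01 to 2020-08-31 may use start_baseline="2020-07-01" and
+ # start_promo="2020-07-21" even though both dates fall inside the July contest.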
+ contest.start_baseline = "2020-07-01" + contest.start_promo = "2020-07-21" + contest.start = "2020-08-01" + contest.end = "2020-08-31" + contest.save() + self.assertIsNotNone(contest.pk) + + def test_active(self): + # create a few contests + contest1 = Contest() + contest1.start_baseline = "3000-04-01" + contest1.start_promo = "3000-04-24" + contest1.start = "3000-05-01" + contest1.end = "3000-05-31" + contest1.save() + + contest2 = Contest() + contest1.start_baseline = "3000-06-01" + contest2.start_promo = "3000-06-21" + contest2.start = "3000-07-01" + contest2.end = "3000-07-31" + contest2.save() + + # Helper function + def _assertEqual(create_obj, db_obj): + self.assertIsNotNone(db_obj) + self.assertEqual(str(create_obj.pk), db_obj.pk) + + # before first baseline, failure + with freeze_time("3000-03-31"): + self.assertIsNone(Contest.active()) + self.assertIsNone(Contest.active(strict=True)) + + # after first baseline but before first promo starts, failure + with freeze_time("3000-04-02"): + self.assertIsNone(Contest.active()) + self.assertIsNone(Contest.active(strict=True)) + + # after promo starts for first contest + with freeze_time("3000-04-28"): + _assertEqual(contest1, Contest.active()) + _assertEqual(contest1, Contest.active(strict=True)) + + # during first contest + with freeze_time("3000-05-15"): + _assertEqual(contest1, Contest.active()) + _assertEqual(contest1, Contest.active(strict=True)) + + # after first contest, before baseline and promo starts for next + with freeze_time("3000-06-14"): + _assertEqual(contest1, Contest.active()) + self.assertIsNone(Contest.active(strict=True)) + + # after promo starts for next + with freeze_time("3000-06-28"): + _assertEqual(contest2, Contest.active()) + _assertEqual(contest2, Contest.active(strict=True)) + + # after last contest + with freeze_time("3000-08-14"): + _assertEqual(contest2, Contest.active()) + self.assertIsNone(Contest.active(strict=True)) + + # Now test the same using Contest.active with `for_date` + # instead of faking time + self.assertIsNone(Contest.active(for_date=date(3000, 4, 1))) + + _assertEqual(contest1, Contest.active(for_date=date(3000, 4, 28))) + _assertEqual(contest1, Contest.active(for_date=date(3000, 5, 15))) + _assertEqual(contest1, Contest.active(for_date=date(3000, 6, 14))) + _assertEqual(contest2, Contest.active(for_date=date(3000, 6, 28))) + _assertEqual(contest2, Contest.active(for_date=date(3000, 8, 14))) + + # Test with `for_date` and `strict` + self.assertIsNone( + Contest.active(for_date=date(3000, 8, 14), strict=True) + ) + + def test_for_baseline(self): + # create a few contests + contest1 = Contest() + contest1.start_baseline = "3000-04-01" + contest1.start_promo = "3000-04-24" + contest1.start = "3000-05-01" + contest1.end = "3000-05-31" + contest1.save() + + self.assertIsNone(Contest.for_baseline("3000-03-31")) + self.assertEqual( + str(contest1.pk), Contest.for_baseline("3000-04-01").pk + ) + self.assertIsNone(Contest.for_baseline("3000-05-01")) + + def test_associate_contest_with_account(self): + contest = Contest.objects.create( + start_promo="3000-04-24", + start="3000-05-01", + end="3000-05-31", + ) + # TODO: use generator + acct = Account.objects.create( + email="fake@us.email", + age=100, + ) + acct.contests.add(contest) + acct.contests.add(contest) + self.assertEqual(1, len(Contest.objects.all())) diff --git a/home/tests/unit/apiv2/test_dates.py b/home/tests/unit/apiv2/test_dates.py new file mode 100644 index 00000000..d7fd0495 --- /dev/null +++ b/home/tests/unit/apiv2/test_dates.py @@ 
-0,0 +1,20 @@ +from datetime import date + +from django.test import TestCase + +from home.utils.dates import get_start_of_week, get_start_of_current_week + + +class TestDates(TestCase): + + # Test get_start_of_week + def test_get_start_of_week(self): + dt = date(2023, 8, 23) + monday = date(2023, 8, 21) + start_of_week = get_start_of_week(dt) + self.assertEqual(monday, start_of_week) + + def test_get_start_of_curent_week(self): + dt = date.today() + d = get_start_of_week(dt) + self.assertAlmostEqual(d, get_start_of_current_week()) diff --git a/home/tests/unit/apiv2/test_histogram.py b/home/tests/unit/apiv2/test_histogram.py new file mode 100644 index 00000000..77dc1c4e --- /dev/null +++ b/home/tests/unit/apiv2/test_histogram.py @@ -0,0 +1,459 @@ +from datetime import date, datetime, timezone +from random import seed + +from django.test import TestCase + +from home.models import Account, Contest +from home.models.dailywalk import DailyWalk +from home.models.intentionalwalk import IntentionalWalk +from home.models.leaderboard import Leaderboard +from home.tests.integration.apiv2.views.api.utils import generate_test_data +from home.views.api.histogram.serializers import HistogramReqSerializer + + +class TestHistogram(TestCase): + def setUp(self): + seed(123) + self.contest_id = generate_test_data() + no_participants = Contest( + start=datetime(5000, 1, 2, tzinfo=timezone.utc), + end=datetime(5000, 1, 31, tzinfo=timezone.utc), + start_promo=datetime(5000, 1, 1, tzinfo=timezone.utc), + start_baseline=datetime(5000, 1, 1, tzinfo=timezone.utc), + ) + no_participants.save() + self.empty_contest_id = str(no_participants.contest_id) + + def tearDown(self) -> None: + seed() + return super().tearDown() + + def create_test_cases(self): + return [ + { + "name": "invalid model", + "input": { + "field": "steps", + "bin_size": 10, + "model": Contest, + }, + "expect": { + "error": "not supported", + }, + }, + { + "name": "empty contest with no user", + "input": { + "field": "age", + "bin_size": 10, + "model": Account, + "contest_id": self.empty_contest_id, + }, + "expect": { + "response": { + "data": [], + } + }, + }, + { + "name": "require contest id data for Leaderboard", + "input": { + "field": "steps", + "bin_size": 10, + "model": Leaderboard, + }, + "expect": { + "error": "contest_id is required", + }, + }, + { + "name": "contest and date are mutually exclusive for Leaderboard", + "input": { + "field": "steps", + "start_date": date(2021, 1, 1), + "bin_size": 10, + "contest_id": self.contest_id, + "model": Leaderboard, + }, + "expect": { + "error": "mutually exclusive", + }, + }, + { + "name": "invalid contest id", + "input": { + "field": "steps", + "bin_size": 10, + "model": Leaderboard, + "contest_id": 999, + }, + "expect": { + "error": "does not exist", + }, + }, + { + "name": "required url parameter 'field'", + "input": { + "bin_size": 10, + "model": Leaderboard, + }, + "expect": { + "error": "field is required", + }, + }, + { + "name": "bin parameters are mutually exclusive", + "input": { + "field": "steps", + "bin_size": 10, + "bin_count": 5, + "bin_custom": "1,2,3,4,5", + "model": Leaderboard, + }, + "expect": { + "error": "are mutually exclusive", + }, + }, + { + "name": "one of bin_size, bin_count, or bin_custom is required", + "input": { + "field": "steps", + "model": Leaderboard, + }, + "expect": { + "error": "bin_size, bin_count, or bin_custom", + }, + }, + { + "name": "invalid bin size", + "input": { + "field": "steps", + "bin_size": -3, + "model": Leaderboard, + }, + "expect": { + "error": 
"greater than", + }, + }, + { + "name": "invalid bin count", + "input": { + "field": "steps", + "bin_count": -1, + "model": Leaderboard, + }, + "expect": { + "error": "greater than", + }, + }, + { + "name": "bin_custom values must be in increasing order", + "input": { + "field": "steps", + "bin_custom": "1,1,3,4,5", + "model": Leaderboard, + }, + "expect": { + "error": "values must be in increasing order", + }, + }, + { + "name": "bin_custom values must be positive", + "input": { + "field": "steps", + "bin_custom": "-1,2,3,4,5", + "model": Leaderboard, + }, + "expect": { + "error": "values must be positive", + }, + }, + { + "name": "bin_custom could not be parsed", + "input": { + "field": "steps", + "bin_custom": "1,2,3,4,5,a", + "model": Leaderboard, + }, + "expect": { + "error": "could not be parsed", + }, + }, + { + "name": "invalid parameter for `field`", + "input": { + "field": "unsupported_field", + "bin_size": 10, + "model": Leaderboard, + }, + "expect": { + "error": "unsupported_field is not supported", + }, + }, + { + "name": "unsupported `field` for Account", + "input": { + "field": "steps", + "bin_size": 10, + "model": Account, + }, + "expect": { + "error": "steps is not supported", + }, + }, + { + "name": "unsupported `Model`", + "input": { + "field": "steps", + "bin_size": 10, + "model": Contest, + }, + "expect": { + "error": "not supported", + }, + }, + { + "name": "missing `Model`", + "input": { + "field": "steps", + "bin_count": 10, + }, + "expect": { + "error": "Model is required", + }, + }, + { + "name": "contest with no participants for Leaderboard", + "input": { + "field": "steps", + "bin_size": 10, + "contest_id": self.contest_id, + "model": Leaderboard, + }, + "expect": { + "response": { + "data": [], + "unit": "steps", + "bin_size": 10, + } + }, + }, + { + "name": "happy: contest with participants for DailyWalk", + "input": { + "field": "steps", + "bin_count": 5, + "contest_id": self.contest_id, + "model": DailyWalk, + }, + "expect": { + "response": { + "data": [ + { + "bin_idx": 1, + "bin_start": 3750, + "bin_end": 7500, + "count": 14, + }, + { + "bin_idx": 2, + "bin_start": 7500, + "bin_end": 11250, + "count": 14, + }, + { + "bin_idx": 4, + "bin_start": 15000, + "bin_end": 18750, + "count": 14, + }, + ], + "bin_count": 5, + "bin_size": 3750, + } + }, + }, + { + "name": "happy: valid Account request by `date`", + "input": { + "field": "age", + "bin_size": 10, + "model": Account, + "date_start": date(2021, 1, 1), + "date_end": date(2021, 1, 31), + }, + "expect": { + "response": { + "data": [ + { + "bin_idx": 1, + "bin_start": 10, + "bin_end": 20, + "count": 1, + }, + { + "bin_idx": 2, + "bin_start": 20, + "bin_end": 30, + "count": 1, + }, + { + "bin_idx": 5, + "bin_start": 50, + "bin_end": 60, + "count": 2, + }, + { + "bin_idx": 6, + "bin_start": 60, + "bin_end": 70, + "count": 2, + }, + ], + "bin_count": 5, + "bin_size": 3750, + } + }, + }, + { + "name": "happy: valid Account request by `contest`", + "input": { + "field": "age", + "bin_size": 10, + "model": Account, + "contest_id": self.contest_id, + }, + "expect": { + "response": { + "data": [ + { + "bin_idx": 2, + "bin_start": 20, + "bin_end": 30, + "count": 1, + }, + { + "bin_idx": 5, + "bin_start": 50, + "bin_end": 60, + "count": 2, + }, + { + "bin_idx": 6, + "bin_start": 60, + "bin_end": 70, + "count": 1, + }, + ], + "bin_count": 5, + "bin_size": 3750, + } + }, + }, + { + "name": "happy: valid IntentionalWalk by contest", + "input": { + "field": "steps", + "bin_size": 10, + "model": IntentionalWalk, + 
"contest_id": self.contest_id, + }, + "expect": { + "response": { + "data": [ + { + "bin_idx": 100, + "bin_start": 1000, + "bin_end": 1010, + "count": 5, + }, + { + "bin_idx": 200, + "bin_start": 2000, + "bin_end": 2010, + "count": 5, + }, + { + "bin_idx": 300, + "bin_start": 3000, + "bin_end": 3010, + "count": 5, + }, + ], + "bin_count": 5, + "bin_size": 3750, + } + }, + }, + { + "name": "happy: valid Account request by `contest`, using custom bins", + "input": { + "field": "age", + "bin_custom": "5,18,24,33,55", + "model": Account, + "contest_id": self.contest_id, + }, + "expect": { + "response": { + "data": [ + { + "bin_start": 18, + "bin_end": 24, + "bin_idx": 1, + "count": 1, + }, + { + "bin_start": 33, + "bin_end": 55, + "bin_idx": 3, + "count": 2, + }, + { + "bin_start": 55, + "bin_end": 67, + "bin_idx": 4, + "count": 1, + }, + ] + } + }, + }, + ] + + def test_validate_histogram_request(self): + for test_case in self.create_test_cases(): + test_case_name = test_case["name"] + with self.subTest(msg=test_case_name): + input_data = test_case["input"] + expect = test_case["expect"] + serializer = HistogramReqSerializer( + data=input_data, model=input_data.get("model", None) + ) + is_valid = serializer.is_valid() + errors = "".join( + f"{k}:{v}" for k, v in serializer.errors.items() + ) + if expect.get("error"): + self.assertEqual(is_valid, False) + self.assertTrue( + len(serializer.errors) > 0, + msg="Missing errors", + ) + self.assertIn( + expect["error"], + errors, + ) + else: + self.assertTrue( + is_valid, + msg=f"Unexpected errors: {errors}", + ) + self.assertEqual(serializer.errors, {}) + got = list(serializer.validated_data["query_set"]) + want = expect["response"]["data"] + self.maxDiff = None + self.assertEqual( + got, + want, + msg=f"{test_case_name}: Received: {got}. Expected {want}", + ) diff --git a/home/tests/unit/apiv2/test_user.py b/home/tests/unit/apiv2/test_user.py new file mode 100644 index 00000000..bc9a9d69 --- /dev/null +++ b/home/tests/unit/apiv2/test_user.py @@ -0,0 +1,201 @@ +from datetime import date, datetime, timedelta + +from django.test import Client, TestCase +from home.models import Contest +from home.utils import localize +from home.utils.generators import ( + AccountGenerator, + DailyWalkGenerator, + DeviceGenerator, + IntentionalWalkGenerator, +) +from home.views.web.user import ( + get_daily_walk_summaries, + get_intentional_walk_summaries, +) +from pytz import utc + + +class TestUserListView(TestCase): + def setUp(self): + plum = next( + AccountGenerator().generate( + 1, + email="plum@clue.net", + name="Professor Plum", + ) + ) + mustard = next( + AccountGenerator().generate( + 1, + email="mustard@clue.net", + name="Colonel Mustard", + ) + ) + + # Set mustard.created as if they created the account right + # after contest.start_promo located at the bottom of this + # function. + # We have to do this because the Account model automatically + # adds a NOW() when created, so we set it after creation. 
+ t = utc.localize(datetime(3000, 3, 2, 10, 0)) + mustard.created = t + mustard.save() + + # Device associated with Plum + device_plum = next(DeviceGenerator([plum]).generate(1)) + # Device associated with Mustard + device_mustard = next(DeviceGenerator([mustard]).generate(1)) + + # Generate daily walks (10 per device) + dw_plum = DailyWalkGenerator([device_plum]) + dw_mustard = DailyWalkGenerator([device_mustard]) + for dt in range(20): + # Set dates on walks to 3000-03-01 to 3000-03-20 + t = utc.localize(datetime(3000, 3, 1, 10, 0)) + timedelta(days=dt) + next( + dw_plum.generate( + 1, account_id=plum.id, date=t, steps=100, distance=50 + ) + ) + + next( + dw_mustard.generate( + 1, account_id=mustard.id, date=t, steps=200, distance=100 + ) + ) + + # Generate intentional walks (5, every other day) + iw_plum = IntentionalWalkGenerator([device_plum]) + iw_mustard = IntentionalWalkGenerator([device_mustard]) + for dt in range(10): + # Set dates on walks to + # [2, 4, 6, 8, 10, 12, 14, 16, 18, 20] (3000-03) + t = utc.localize(datetime(3000, 3, 2, 10, 0)) + timedelta( + days=(dt * 2) + ) + next( + iw_plum.generate( + 1, steps=10, start=t, end=(t + timedelta(hours=1)) + ) + ) + next( + iw_mustard.generate( + 1, steps=20, start=t, end=(t + timedelta(hours=2)) + ) + ) + + self.contest = Contest.objects.create( + start_promo="3000-03-01", + start="3000-03-08", + end="3000-03-14", + ) + + def test_get_daily_walk_summaries(self): + # Get one week's worth of data (3/1 to 3/7 inclusive) + dw = get_daily_walk_summaries( + date__range=(date(3000, 3, 1), date(3000, 3, 14)) + ) + + plum_data = dw["plum@clue.net"] + self.assertEqual(14, plum_data["dw_count"]) + self.assertEqual(1400, plum_data["dw_steps"]) + self.assertEqual(700, plum_data["dw_distance"]) + + mustard_data = dw["mustard@clue.net"] + self.assertEqual(14, mustard_data["dw_count"]) + self.assertEqual(2800, mustard_data["dw_steps"]) + self.assertEqual(1400, mustard_data["dw_distance"]) + + def test_get_intentional_walk_summaries(self): + iw = get_intentional_walk_summaries( + start__range=( + localize(date(3000, 3, 1)), + localize(date(3000, 3, 14)) + timedelta(days=1), + ) + ) + + plum_data = iw["plum@clue.net"] + # 7 intentional walks during this period: 2, 4, 6, 8, 10, 12, 14 + self.assertEqual(7, plum_data["rw_count"]) + # 1 hour per walk + self.assertEqual( + 7 * 3600, plum_data["rw_total_walk_time"].total_seconds() + ) + # 10 steps per walk + self.assertEqual(70, plum_data["rw_steps"]) + + mustard_data = iw["mustard@clue.net"] + self.assertEqual(7, mustard_data["rw_count"]) + self.assertEqual( + 14 * 3600, mustard_data["rw_total_walk_time"].total_seconds() + ) + self.assertEqual(140, mustard_data["rw_steps"]) + + def test_UserListView_all_walks(self): + client = Client() + response = client.get("/users/") + user_stats_list = response.context_data["user_stats_list"] + self.assertEqual(2, len(user_stats_list)) + user_stats = {row["account"]["email"]: row for row in user_stats_list} + plum_data = user_stats["plum@clue.net"] + mustard_data = user_stats["mustard@clue.net"] + self.assertEqual(20, plum_data["num_dws"]) + self.assertEqual(20, mustard_data["num_dws"]) + self.assertEqual(10, plum_data["num_rws"]) + self.assertEqual(10, mustard_data["num_rws"]) + + def test_UserListView_with_contest_id(self): + client = Client() + response = client.get( + "/users/", {"contest_id": self.contest.contest_id} + ) + user_stats_list = response.context_data["user_stats_list"] + self.assertEqual(2, len(user_stats_list)) + user_stats = 
{row["account"]["email"]: row for row in user_stats_list} + plum_data = user_stats["plum@clue.net"] + mustard_data = user_stats["mustard@clue.net"] + # dw: [8, 9. 10, 11, 12, 13, 14] + self.assertEqual(7, plum_data["num_dws"]) + self.assertEqual(7, mustard_data["num_dws"]) + # iw: [8, 10, 12, 14] + self.assertEqual(4, plum_data["num_rws"]) + self.assertEqual(4, mustard_data["num_rws"]) + + def test_UserListView_user_counts(self): + client = Client() + response = client.get( + "/users/", {"contest_id": self.contest.contest_id} + ) + + # We have two users, plum and mustard. + self.assertEqual(response.context_data["cnt_users"], 2) + + # Only mustard created their account during this contest range. + self.assertEqual(response.context_data["cnt_signups"], 1) + + # Both users have recorded walks in the contest we're looking at. + self.assertEqual(response.context_data["cnt_active_users"], 2) + + # But only mustard is new. + self.assertEqual(response.context_data["cnt_new_active_users"], 1) + + +class TestUserListViewEmptyContest(TestCase): + def setUp(self): + self.contest = Contest.objects.create( + start_promo="3000-03-01", + start="3000-03-08", + end="3000-03-14", + ) + + def test_UserListViewEmptyContest_user_counts(self): + client = Client() + response = client.get( + "/users/", {"contest_id": self.contest.contest_id} + ) + + self.assertEqual(response.context_data["cnt_users"], 0) + self.assertEqual(response.context_data["cnt_signups"], 0) + self.assertEqual(response.context_data["cnt_active_users"], 0) + self.assertEqual(response.context_data["cnt_new_active_users"], 0) diff --git a/home/tests/unit/apiv2/test_utils.py b/home/tests/unit/apiv2/test_utils.py new file mode 100644 index 00000000..6fdff333 --- /dev/null +++ b/home/tests/unit/apiv2/test_utils.py @@ -0,0 +1,39 @@ +from django.contrib.auth.models import AnonymousUser, User +from django.http import HttpResponse +from django.test import RequestFactory, TestCase +from django.views import View + +from home.tests.integration.apiv2.views.api.utils import Login +from home.views.api.utils import require_authn + + +class TestAuthn(TestCase): + def setUp(self): + # Create a test view that uses the require_authn decorator + class TestView(View): + @require_authn + def get(self, request): + return HttpResponse("Hello, World!") + + self.view = TestView.as_view() + self.factory = RequestFactory() + + def test_authenticated(self): + Login() + self.user = User.objects.get(username=Login.username) + + # Create a request and authenticate the user + request = self.factory.get("/") + request.user = self.user + + response = self.view(request) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content.decode(), "Hello, World!") + + def test_unauthenticated(self): + # Create a request and don't authenticate the user + request = self.factory.get("/") + request.user = AnonymousUser() + + response = self.view(request) + self.assertEqual(response.status_code, 401) diff --git a/home/views/apiv2/admin.py b/home/views/apiv2/admin.py index 03e62a51..87bb69c1 100644 --- a/home/views/apiv2/admin.py +++ b/home/views/apiv2/admin.py @@ -74,7 +74,7 @@ def get_admin_home(request): def get_contest_start_end(qs: HomeGraphFilter) -> tuple[str, str]: - # handle common parameters for all the chart data API endpoints + # Handle common parameters for all the chart data API endpoints if qs.contest_id: try: contest = Contest.objects.get(pk=qs.contest_id) @@ -98,12 +98,12 @@ def process_results( end_date: str | None, is_cumulative: bool = False, ) -> 
list: - # handle common result processing for the chart data + # Handle common result processing for the chart data if len(results) > 0: if start_date and results[0][0] != f"{start_date}T00:00:00": results.insert(0, [f"{start_date}T00:00:00", 0]) if end_date and results[-1][0] != f"{end_date}T00:00:00": - if is_cumulative(): + if is_cumulative: results.append([f"{end_date}T00:00:00", results[-1][1]]) else: results.append([f"{end_date}T00:00:00", 0]) @@ -123,9 +123,9 @@ def process_results( def get_users_daily(request, qs: Query[HomeGraphFilter]): start_date, end_date = get_contest_start_end(qs) filters = Q() - # filter to show users vs testers + # Filter to show users vs testers filters = filters & Q(is_tester=qs.is_tester) - # filter by date + # Filter by date if start_date: filters = filters & Q(created__gte=start_date) if end_date: @@ -208,9 +208,9 @@ def get_results_walks_daily( value_type=None, ): filters = Q() - # filter to show users vs testers + # Filter to show users vs testers filters = filters & Q(account__is_tester=is_tester) - # filter by date + # Filter by date if start_date: filters = filters & Q(date__gte=start_date) if end_date: @@ -299,10 +299,10 @@ def get_results_walks_cumulative( (SELECT CONCAT("date", 'T00:00:00') AS "date", SUM("{value_type}") AS "count" - FROM "home_dailywalk" - JOIN "home_account" ON "home_account"."id"="home_dailywalk"."account_id" - WHERE {conditions} - GROUP BY "date") subquery + FROM "home_dailywalk" + JOIN "home_account" ON "home_account"."id"="home_dailywalk"."account_id" + WHERE {conditions} + GROUP BY "date") subquery ORDER BY "date" """, params, @@ -325,7 +325,12 @@ def get_walks_steps_daily(request, qs: Query[HomeGraphFilter]): is_tester=qs.is_tester, value_type="steps", ) - results = process_results(results, start_date, end_date) + results = process_results( + results=results, + start_date=start_date, + end_date=end_date, + is_cumulative=True, + ) return results @@ -344,7 +349,10 @@ def get_walks_steps_daily(request, qs: Query[HomeGraphFilter]): value_type="distance", ) results = process_results( - results, start_date, end_date, is_cumulative=True + results=results, + start_date=start_date, + end_date=end_date, + is_cumulative=True, ) return results @@ -417,9 +425,8 @@ def get_contests(request): def get_users_by_zip(request, qs: Query[UsersByZipInSchema]): values = ["zip"] order_by = ["zip"] - # if request.user.is_authenticated: payload = {} - # filter and annotate based on contest_id + # Filter and annotate based on contest_id filters = None annotate = { "count": Count("zip"), @@ -430,10 +437,10 @@ def get_users_by_zip(request, qs: Query[UsersByZipInSchema]): else: filters = Q() - # filter to show users vs testers + # Filter to show users vs testers filters = filters & Q(is_tester=qs.is_tester) - # query for totals + # Query for totals results = ( Account.objects.filter(filters) .values(*values) @@ -645,7 +652,7 @@ def get_model_histogram( bin_size=qs.bin_size, bin_custom=qs.bin_custom, ) - # even bins either specified by bin_size or bin_size computed from bin_count + # Even bins either specified by bin_size or bin_size computed from bin_count if res.get("bin_size"): return 200, { "data": list( @@ -659,7 +666,7 @@ def get_model_histogram( "bin_size": res["bin_size"], } - # custom bins + # Custom bins return 200, { "data": list( histogram.fill_missing_bin_idx( diff --git a/home/views/apiv2/appuser.py b/home/views/apiv2/appuser.py index 67d93f57..d0de2d6f 100644 --- a/home/views/apiv2/appuser.py +++ b/home/views/apiv2/appuser.py @@ -4,7 
+4,11 @@ from home.models import Account, Device from home.models.account import SAN_FRANCISCO_ZIP_CODES -from home.views.apiv2.schemas.account import AccountPatchSchema, AccountSchema +from home.views.apiv2.schemas.account import ( + AccountPatchSchema, + AccountSchema, + ErrorSchema, +) router = Router() @@ -25,7 +29,7 @@ def update_model(account: Account, json_data: dict): account.save() -@router.post("", response={201: AccountSchema}) +@router.post("", response={201: AccountSchema, 400: ErrorSchema}) @csrf_exempt def create_appuser(request, payload: AccountSchema): # Parse the body json @@ -52,7 +56,7 @@ def create_appuser(request, payload: AccountSchema): # Otherwise, update the account's other details account = Account.objects.get(email__iexact=json_data["email"]) update_model(account, json_data) - # return 201, {"account_id": device.device_id, **account.__dict__} + return 201, payload # This implies that it is a new device except Device.DoesNotExist: @@ -79,7 +83,7 @@ def create_appuser(request, payload: AccountSchema): return 201, payload -@router.put("/{account_id}", response={204: None}) +@router.put("/{account_id}", response={204: None, 404: ErrorSchema}) @csrf_exempt def update_appuser(request, account_id: str, payload: AccountSchema): json_data = payload.dict() @@ -98,7 +102,7 @@ def update_appuser(request, account_id: str, payload: AccountSchema): return 204, None -@router.patch("/{account_id}", response={204: None}) +@router.patch("/{account_id}", response={204: None, 404: ErrorSchema}) @csrf_exempt def update_appuser(request, account_id: str, payload: AccountPatchSchema): json_data = payload.dict() @@ -120,7 +124,7 @@ def update_appuser(request, account_id: str, payload: AccountPatchSchema): return 204, None -@router.delete("/{account_id}", response={204: None}) +@router.delete("/{account_id}", response={204: None, 404: ErrorSchema}) @csrf_exempt def delete_appuser(request, account_id: str): try: diff --git a/home/views/apiv2/contest.py b/home/views/apiv2/contest.py index 7e13cf13..3cc41cc2 100644 --- a/home/views/apiv2/contest.py +++ b/home/views/apiv2/contest.py @@ -3,12 +3,12 @@ from ninja.errors import HttpError from home.models import Contest -from home.views.apiv2.schemas.contest import ContestSchema +from home.views.apiv2.schemas.contest import ContestSchema, ErrorSchema router = Router() -@router.get("/current", response={200: ContestSchema}) +@router.get("/current", response={200: ContestSchema, 404: ErrorSchema}) @csrf_exempt def get_curent_contest(request): # get the current/next Contest diff --git a/home/views/apiv2/dailywalk.py b/home/views/apiv2/dailywalk.py index 2dfd36ef..706a2b3e 100644 --- a/home/views/apiv2/dailywalk.py +++ b/home/views/apiv2/dailywalk.py @@ -44,7 +44,7 @@ def get_daily_walks(request, account_id: str): return 200, {"daily_walks": list(daily_walks)} -@router.post("", response={201: DailyWalkOutSchema}) +@router.post("", response={201: DailyWalkOutSchema, 404: ErrorSchema}) @csrf_exempt def create_daily_walk(request, payload: DailyWalkInSchema): json_data = payload.dict() @@ -63,7 +63,6 @@ def create_daily_walk(request, payload: DailyWalkInSchema): # Json response template json_response = { - "account_id": device.device_id, "daily_walks": [], } diff --git a/home/views/apiv2/device.py b/home/views/apiv2/device.py index d2469fb5..6b05ecac 100644 --- a/home/views/apiv2/device.py +++ b/home/views/apiv2/device.py @@ -24,9 +24,9 @@ def update_device(request, device_id: str, payload: DeviceInSchema): except Device.DoesNotExist: raise HttpError( 
404, - f"""Unregistered device - - device_id: {device_id} - Please register first!""", + "Unregistered device - " + f"device_id: {device_id}. " + "Please register first!", ) update_model(device, payload.dict()) @@ -42,9 +42,9 @@ def patch_device(request, device_id: str, payload: DeviceInSchema): raise HttpError( 404, ( - f"Unregistered device - " - f"{device_id}. " - f"Please register first!" + "Unregistered device - " + f"device_id: {device_id}. " + "Please register first!" ), ) update_model(device, payload.dict(exclude_unset=True)) @@ -61,9 +61,9 @@ def delete_device(request, device_id: str): raise HttpError( 404, ( - f"Unregistered device - " - f"{device_id}. " - f"Please register first!" + "Unregistered device - " + f"device_id: {device_id}. " + "Please register first!" ), ) device.account.delete() diff --git a/home/views/apiv2/export.py b/home/views/apiv2/export.py index 86e4f19a..6f51818d 100644 --- a/home/views/apiv2/export.py +++ b/home/views/apiv2/export.py @@ -252,9 +252,8 @@ def export_contest_users_data(file, contest_id, is_tester): @router.get("/users") @csrf_exempt -def export_users(request, contest_id: str, is_tester: str): +def export_users(request, contest_id: str, is_tester: str = "false"): is_tester = is_tester == "true" - if not contest_id: return HttpResponse(status=422) elif not request.user.is_authenticated: diff --git a/home/views/apiv2/leaderboard.py b/home/views/apiv2/leaderboard.py index 9138f5d9..8a5f9bd1 100644 --- a/home/views/apiv2/leaderboard.py +++ b/home/views/apiv2/leaderboard.py @@ -28,13 +28,15 @@ def get_leaderboard(request, contest_id: str, device_id: str): device = Device.objects.get(device_id=device_id) except Device.DoesNotExist: raise HttpError( - 404, ( - f"Unregistered device - " - f"{device_id}. " - f"Please register first!"), + 404, + ( + f"Unregistered device - " + f"{device_id}. " + f"Please register first!" 
+ ), ) - # Json response template + # JSON response template json_response = {"leaderboard": []} leaderboard_list = [] @@ -48,7 +50,7 @@ def get_leaderboard(request, contest_id: str, device_id: str): .annotate(rank=Window(expression=Rank(), order_by=F("steps").desc())) ) - # get top 10 + # Get top 10 leaderboard_list = list(leaderboard[0:leaderboard_length]) # Check if user should be added after top 10 displayed diff --git a/home/views/apiv2/schemas/account.py b/home/views/apiv2/schemas/account.py index d8ae088c..af34c7eb 100644 --- a/home/views/apiv2/schemas/account.py +++ b/home/views/apiv2/schemas/account.py @@ -117,8 +117,6 @@ def validate_gender(self) -> Self: gender = self.gender gender_other = self.gender_other if gender is not None: - # if gender not in GenderLabels.__members__: - # raise ValueError(f"Invalid gender selection '{gender}'") if gender == "OT": if not gender_other: raise ValueError("Must specify 'other' gender") @@ -138,10 +136,6 @@ def validate_sex_orien(self) -> Self: sexual_orien = self.sexual_orien sexual_orien_other = self.sexual_orien_other if sexual_orien is not None: - # if sexual_orien not in SexualOrientationLabels.__members__: - # raise ValueError( - # f"Invalid sexual orientation selection '{sexual_orien}'" - # ) if sexual_orien == "OT": if not sexual_orien_other: raise ValueError("Must specify 'other' sexual orientation") diff --git a/home/views/apiv2/schemas/admin.py b/home/views/apiv2/schemas/admin.py index a7ac46e6..798bfc0a 100644 --- a/home/views/apiv2/schemas/admin.py +++ b/home/views/apiv2/schemas/admin.py @@ -123,9 +123,11 @@ def filter_dict(self) -> int: output_field=BooleanField(), ), "dw_count": Count("dailywalk", filter=dailywalk_filter), - "dw_steps": Sum("dailywalk__steps", filter=dailywalk_filter), + "dw_steps": Sum( + "dailywalk__steps", filter=dailywalk_filter, default=0 + ), "dw_distance": Sum( - "dailywalk__distance", filter=dailywalk_filter + "dailywalk__distance", filter=dailywalk_filter, default=0 ), } intentionalwalk_filter = Q( @@ -136,8 +138,8 @@ def filter_dict(self) -> int: filters = Q() annotate = { "dw_count": Count("dailywalk"), - "dw_steps": Sum("dailywalk__steps"), - "dw_distance": Sum("dailywalk__distance"), + "dw_steps": Sum("dailywalk__steps", default=0), + "dw_distance": Sum("dailywalk__distance", default=0), } intentionalwalk_filter = Q() @@ -146,13 +148,19 @@ def filter_dict(self) -> int: "intentionalwalk", filter=intentionalwalk_filter ), "iw_steps": Sum( - "intentionalwalk__steps", filter=intentionalwalk_filter + "intentionalwalk__steps", + filter=intentionalwalk_filter, + default=0, ), "iw_distance": Sum( - "intentionalwalk__distance", filter=intentionalwalk_filter + "intentionalwalk__distance", + filter=intentionalwalk_filter, + default=0, ), "iw_time": Sum( - "intentionalwalk__walk_time", filter=intentionalwalk_filter + "intentionalwalk__walk_time", + filter=intentionalwalk_filter, + default=0, ), } @@ -232,7 +240,7 @@ class Meta: class UsersOutSchema(Schema): - users: List[UsersOut] + users: List[UsersOut] = Field(default=None) class UsersByZipInSchema(Schema): @@ -255,16 +263,6 @@ class UsersByZipOutSchema(Schema): ) -class UsersByZipActiveInSchema(Schema): - contest_id: str = Field( - default=None, description="The ID of the contest to filter by." 
- ) - is_tester: bool = Field( - default=False, - description="If true, will only return records related to tester accounts.", - ) - - class HistogramInSchema(Schema): field: str = Field(description="The field to group by") contest_id: str = Field( @@ -372,6 +370,7 @@ class Bins(Schema): description="The end of the bin", ) bin_end: int = Field( + default=None, description="The end of the bin", ) count: int = Field( diff --git a/home/views/apiv2/schemas/device.py b/home/views/apiv2/schemas/device.py index 14c3b47e..477530e0 100644 --- a/home/views/apiv2/schemas/device.py +++ b/home/views/apiv2/schemas/device.py @@ -56,60 +56,6 @@ class DeviceInSchema(Schema): ) -class DeviceOutSchema(Schema): - model_config = ConfigDict(extra="forbid") - - device_id: str = Field( - max_length=250, - description="""A unique id generated by the app when it is first installed" - on a device. Used for authentication on subsequent calls.""", - ) - created: datetime = Field( - description="When the record was created/device was registered" - ) - device_model: str | None = Field( - default=None, - max_length=25, - description="""Unique identifier for the device's model. -

-        getDeviceid() - Gets the device ID.
-        iOS: "iPhone7,2"
-        Android: "goldfish"
-        Windows: "Y3R94UC#AC4"
-        """,
-    )
-    manufacturer: str | None = Field(
-        default=None,
-        max_length=25,
-        description="""Manufacturer of the device.
-
-        getManufacturer() - Gets the device manufacturer
-        iOS: "Apple"
-        Android: "Google"
-        Windows: ?
-        """,
-    )
-    os_name: str | None = Field(
-        default=None,
-        max_length=25,
-        description="""Operating system of the device.
-
-        getSystemName() - Gets the device OS name.
-        iOS: "iOS" on newer iOS devices "iPhone OS" on older devices
-        (including older iPad models), "iPadOS" for iPads using iPadOS 15.0 or higher.
-        Android: "Android"
-        Windows: ?
-        """,
-    )
-    os_version: str | None = Field(
-        default=None,
-        max_length=25,
-        description="""Device operating system version.
-
-        getSystemVersion() - Gets the device OS version.
-        iOS: "11.0"
-        Android: "7.1.1" -
Windows: ? - """, - ) - - class ErrorSchema(Schema): message: str = Field( description="Error message to display", diff --git a/home/views/apiv2/schemas/weeklygoal.py b/home/views/apiv2/schemas/weeklygoal.py index 49660cb4..fda036e8 100644 --- a/home/views/apiv2/schemas/weeklygoal.py +++ b/home/views/apiv2/schemas/weeklygoal.py @@ -46,14 +46,6 @@ class WeeklyGoalOutSchema(Schema): ) -class WeeklyGoalListInSchema(Schema): - model_config = ConfigDict(extra="forbid") - - account_id: str = Field( - description="Account id of the account the data is linked to." - ) - - class WeeklyGoalOutList(Schema): id: int = Field(description="Unique id for the set weekly goal.") start_of_week: str = Field( diff --git a/home/views/apiv2/weeklygoal.py b/home/views/apiv2/weeklygoal.py index d754ed05..dc15808a 100644 --- a/home/views/apiv2/weeklygoal.py +++ b/home/views/apiv2/weeklygoal.py @@ -9,7 +9,6 @@ from home.views.apiv2.schemas.weeklygoal import ( ErrorSchema, WeeklyGoalInSchema, - WeeklyGoalListInSchema, WeeklyGoalListOutSchema, WeeklyGoalOutSchema, ) @@ -76,21 +75,22 @@ def create_weekly_goal(request, payload: WeeklyGoalInSchema): return 201, json_response -@router.get("", response={200: WeeklyGoalListOutSchema, 404: ErrorSchema}) +@router.get( + "/{account_id}", response={200: WeeklyGoalListOutSchema, 404: ErrorSchema} +) @csrf_exempt -def get_weekly_goals(request, payload: WeeklyGoalListInSchema): +def get_weekly_goals(request, account_id: str): """Get List of Weekly Goals""" - json_data = payload.dict() # Get the device try: - device = Device.objects.get(device_id=json_data["account_id"]) + device = Device.objects.get(device_id=account_id) account = device.account except Device.DoesNotExist: raise HttpError( 404, ( - f"Unregistered device - " - f"{json_data['account_id']}. " + f"Unregistered account - " + f"{account_id}. " f"Please register first!" 
), ) From 1dbd50082e13a8de4e7837ad521174e6a9815aa8 Mon Sep 17 00:00:00 2001 From: Patrick Yu Date: Wed, 11 Sep 2024 23:22:48 -0700 Subject: [PATCH 7/9] Fixed linting errors --- .../0008_alter_gender_race_fields_setfield.py | 2 - home/migrations/0010_auto_20220321_0407.py | 2 - ...vice_model_device_manufacturer_and_more.py | 25 +++++- home/models/account.py | 3 - home/models/device.py | 11 +-- .../integration/apiv1/dailywalk/__init__.py | 1 - .../integration/apiv1/histogram/__init__.py | 1 - .../integration/apiv1/views/api/__init__.py | 1 - .../integration/apiv1/views/api/test_admin.py | 6 +- .../integration/apiv1/weeklygoal/__init__.py | 1 - .../integration/apiv2/appuser/__init__.py | 1 - .../integration/apiv2/appuser/test_delete.py | 2 +- .../integration/apiv2/dailywalk/test_get.py | 2 - .../integration/apiv2/device/test_delete.py | 2 +- .../tests/integration/apiv2/views/__init__.py | 1 - .../integration/apiv2/views/api/test_admin.py | 8 +- .../integration/apiv2/views/web/__init__.py | 1 - .../apiv2/weeklygoal/test_create.py | 2 - home/views/apiv2/admin.py | 11 +-- home/views/apiv2/appuser.py | 6 +- home/views/apiv2/export.py | 1 - home/views/apiv2/schemas/admin.py | 80 +++++++++---------- home/views/apiv2/schemas/device.py | 47 +++++------ home/views/apiv2/schemas/intentionalwalk.py | 1 - home/views/apiv2/schemas/leaderboard.py | 3 - 25 files changed, 101 insertions(+), 120 deletions(-) diff --git a/home/migrations/0008_alter_gender_race_fields_setfield.py b/home/migrations/0008_alter_gender_race_fields_setfield.py index ba369119..e2d0aae1 100644 --- a/home/migrations/0008_alter_gender_race_fields_setfield.py +++ b/home/migrations/0008_alter_gender_race_fields_setfield.py @@ -2,8 +2,6 @@ from django.db import migrations, models -import home.models.account - class Migration(migrations.Migration): diff --git a/home/migrations/0010_auto_20220321_0407.py b/home/migrations/0010_auto_20220321_0407.py index 383c9a0d..8e7abc17 100644 --- a/home/migrations/0010_auto_20220321_0407.py +++ b/home/migrations/0010_auto_20220321_0407.py @@ -2,8 +2,6 @@ from django.db import migrations, models -import home.models.account - class Migration(migrations.Migration): diff --git a/home/migrations/0014_device_device_model_device_manufacturer_and_more.py b/home/migrations/0014_device_device_model_device_manufacturer_and_more.py index 208c4a64..bb0c48a3 100644 --- a/home/migrations/0014_device_device_model_device_manufacturer_and_more.py +++ b/home/migrations/0014_device_device_model_device_manufacturer_and_more.py @@ -15,7 +15,11 @@ class Migration(migrations.Migration): name="device_model", field=models.CharField( blank=True, - help_text='Unique identifier for the device\'s model.\n getDeviceid() - Gets the device ID.\n iOS: "iPhone7,2"\n Android: "goldfish"\n Windows: "Y3R94UC#AC4"\n ', + help_text='''Unique identifier for the device\'s model.\n + getDeviceid() - Gets the device ID. 
+ iOS: "iPhone7,2" + Android: "goldfish" + Windows: "Y3R94UC#AC4"''', max_length=25, null=True, ), @@ -25,7 +29,11 @@ class Migration(migrations.Migration): name="manufacturer", field=models.CharField( blank=True, - help_text='Manufacturer of the device.\n getManufacturer() - Gets the device manufacturer\n iOS: "Apple"\n Android: "Google"\n Windows: ?\n ', + help_text="""Manufacturer of the device.\n + getManufacturer() - Gets the device manufacturer + iOS: "Apple" + Android: "Google" + Windows: ?""", max_length=25, null=True, ), @@ -35,7 +43,12 @@ class Migration(migrations.Migration): name="os_name", field=models.CharField( blank=True, - help_text='Operating system of the device.\n getSystemName() - Gets the device OS name.\n iOS: "iOS" on newer iOS devices "iPhone OS" on older devices (including older iPad models), "iPadOS" for iPads using iPadOS 15.0 or higher.\n Android: "Android"\n Windows: ?\n ', + help_text="""Operating system of the device.\n + getSystemName() - Gets the device OS name. + iOS: "iOS" on newer iOS devices "iPhone OS" on older devices (including older iPad models), + \t\t"iPadOS" for iPads using iPadOS 15.0 or higher. + Android: "Android" + Windows: ?""", max_length=25, null=True, ), @@ -45,7 +58,11 @@ class Migration(migrations.Migration): name="os_version", field=models.CharField( blank=True, - help_text='Device operating system version. \n getSystemVersion() - Gets the device OS version.\n iOS: "11.0"\n Android: "7.1.1"\n Windows: ?\n ', + help_text="""Device operating system version.\n + getSystemVersion() - Gets the device OS version. + iOS: "11.0" + Android: "7.1.1" + Windows: ?""", max_length=25, null=True, ), diff --git a/home/models/account.py b/home/models/account.py index 6a422f3a..770b4352 100644 --- a/home/models/account.py +++ b/home/models/account.py @@ -1,6 +1,3 @@ -import json -from enum import Enum - from django.db import models from django.utils.translation import gettext_lazy as _ diff --git a/home/models/device.py b/home/models/device.py index eb331663..f9f513ac 100644 --- a/home/models/device.py +++ b/home/models/device.py @@ -30,7 +30,7 @@ class Device(models.Model): max_length=25, null=True, blank=True, - help_text="""Unique identifier for the device's model. + help_text="""Unique identifier for the device's model.\n getDeviceid() - Gets the device ID. iOS: "iPhone7,2" Android: "goldfish" @@ -56,7 +56,7 @@ class Device(models.Model): max_length=25, null=True, blank=True, - help_text="""Manufacturer of the device. + help_text="""Manufacturer of the device.\n getManufacturer() - Gets the device manufacturer iOS: "Apple" Android: "Google" @@ -67,9 +67,10 @@ class Device(models.Model): max_length=25, null=True, blank=True, - help_text="""Operating system of the device. + help_text="""Operating system of the device.\n getSystemName() - Gets the device OS name. - iOS: "iOS" on newer iOS devices "iPhone OS" on older devices (including older iPad models), "iPadOS" for iPads using iPadOS 15.0 or higher. + iOS: "iOS" on newer iOS devices "iPhone OS" on older devices (including older iPad models), + \t\t"iPadOS" for iPads using iPadOS 15.0 or higher. Android: "Android" Windows: ? """, @@ -78,7 +79,7 @@ class Device(models.Model): max_length=25, null=True, blank=True, - help_text="""Device operating system version. + help_text="""Device operating system version.\n getSystemVersion() - Gets the device OS version. 
iOS: "11.0" Android: "7.1.1" diff --git a/home/tests/integration/apiv1/dailywalk/__init__.py b/home/tests/integration/apiv1/dailywalk/__init__.py index 8b137891..e69de29b 100644 --- a/home/tests/integration/apiv1/dailywalk/__init__.py +++ b/home/tests/integration/apiv1/dailywalk/__init__.py @@ -1 +0,0 @@ - diff --git a/home/tests/integration/apiv1/histogram/__init__.py b/home/tests/integration/apiv1/histogram/__init__.py index 8b137891..e69de29b 100644 --- a/home/tests/integration/apiv1/histogram/__init__.py +++ b/home/tests/integration/apiv1/histogram/__init__.py @@ -1 +0,0 @@ - diff --git a/home/tests/integration/apiv1/views/api/__init__.py b/home/tests/integration/apiv1/views/api/__init__.py index 8b137891..e69de29b 100644 --- a/home/tests/integration/apiv1/views/api/__init__.py +++ b/home/tests/integration/apiv1/views/api/__init__.py @@ -1 +0,0 @@ - diff --git a/home/tests/integration/apiv1/views/api/test_admin.py b/home/tests/integration/apiv1/views/api/test_admin.py index 4896b171..3da16280 100644 --- a/home/tests/integration/apiv1/views/api/test_admin.py +++ b/home/tests/integration/apiv1/views/api/test_admin.py @@ -293,7 +293,7 @@ def test_get_users_by_zip(self): # authenticated self.assertTrue(Login.login(c)) - response = c.get(f"/api/admin/users/zip") + response = c.get("/api/admin/users/zip") data = response.json() self.assertEqual( data, @@ -324,7 +324,7 @@ def test_get_users_active_by_zip(self): self.assertTrue(Login.login(c)) # no contest_id given - response = c.get(f"/api/admin/users/zip/active") + response = c.get("/api/admin/users/zip/active") self.assertEqual(response.status_code, 422) response = c.get( @@ -351,7 +351,7 @@ def test_get_users_median_steps_by_zip(self): self.assertTrue(Login.login(c)) # no contest_id given - response = c.get(f"/api/admin/users/zip/steps") + response = c.get("/api/admin/users/zip/steps") self.assertEqual(response.status_code, 422) response = c.get( diff --git a/home/tests/integration/apiv1/weeklygoal/__init__.py b/home/tests/integration/apiv1/weeklygoal/__init__.py index 8b137891..e69de29b 100644 --- a/home/tests/integration/apiv1/weeklygoal/__init__.py +++ b/home/tests/integration/apiv1/weeklygoal/__init__.py @@ -1 +0,0 @@ - diff --git a/home/tests/integration/apiv2/appuser/__init__.py b/home/tests/integration/apiv2/appuser/__init__.py index 8b137891..e69de29b 100644 --- a/home/tests/integration/apiv2/appuser/__init__.py +++ b/home/tests/integration/apiv2/appuser/__init__.py @@ -1 +0,0 @@ - diff --git a/home/tests/integration/apiv2/appuser/test_delete.py b/home/tests/integration/apiv2/appuser/test_delete.py index 20fcc853..ea2e069c 100644 --- a/home/tests/integration/apiv2/appuser/test_delete.py +++ b/home/tests/integration/apiv2/appuser/test_delete.py @@ -179,7 +179,7 @@ def test_delete_user_failure_nonexistent(self): self.check_delete_failure( response, - expected_msg=f"Cannot find device registered with account_id: fakeID", + expected_msg="Cannot find device registered with account_id: fakeID", ) self.check_users_and_device_still_exist( [self.account_id1, self.account_id2] diff --git a/home/tests/integration/apiv2/dailywalk/test_get.py b/home/tests/integration/apiv2/dailywalk/test_get.py index c26d15d3..f1159a81 100644 --- a/home/tests/integration/apiv2/dailywalk/test_get.py +++ b/home/tests/integration/apiv2/dailywalk/test_get.py @@ -1,7 +1,5 @@ from django.test import Client, TestCase -from home.models import Account - class ApiTestCase(TestCase): def setUp(self): diff --git a/home/tests/integration/apiv2/device/test_delete.py 
b/home/tests/integration/apiv2/device/test_delete.py index ad29cb9a..7df017e4 100644 --- a/home/tests/integration/apiv2/device/test_delete.py +++ b/home/tests/integration/apiv2/device/test_delete.py @@ -179,7 +179,7 @@ def test_delete_user_failure_nonexistent(self): self.check_delete_failure( response, - expected_msg=f"Unregistered device - device_id: fakeID. Please register first!", + expected_msg="Unregistered device - device_id: fakeID. Please register first!", ) self.check_users_and_device_still_exist( [self.account_id1, self.account_id2] diff --git a/home/tests/integration/apiv2/views/__init__.py b/home/tests/integration/apiv2/views/__init__.py index 8b137891..e69de29b 100644 --- a/home/tests/integration/apiv2/views/__init__.py +++ b/home/tests/integration/apiv2/views/__init__.py @@ -1 +0,0 @@ - diff --git a/home/tests/integration/apiv2/views/api/test_admin.py b/home/tests/integration/apiv2/views/api/test_admin.py index 79b57d9f..ba540b6c 100644 --- a/home/tests/integration/apiv2/views/api/test_admin.py +++ b/home/tests/integration/apiv2/views/api/test_admin.py @@ -121,7 +121,7 @@ def test_get_home_steps_daily(self): def test_get_home_steps_daily_invalid_contest_id(self): c = Client() self.assertTrue(Login.login(c)) - response = c.get(f"/api/v2/admin/home/steps/daily?contest_id=invalid") + response = c.get("/api/v2/admin/home/steps/daily?contest_id=invalid") # Check for a successful response by the server self.assertEqual(response.status_code, 404) # Parse the response @@ -308,7 +308,7 @@ def test_get_users_by_zip(self): # authenticated self.assertTrue(Login.login(c)) - response = c.get(f"/api/v2/admin/users/zip") + response = c.get("/api/v2/admin/users/zip") data = response.json() self.assertEqual( data, @@ -341,7 +341,7 @@ def test_get_users_active_by_zip(self): self.assertTrue(Login.login(c)) # no contest_id given - response = c.get(f"/api/v2/admin/users/zip/active") + response = c.get("/api/v2/admin/users/zip/active") self.assertEqual(response.status_code, 404) # Parse the response response_data = response.json() @@ -376,7 +376,7 @@ def test_get_users_median_steps_by_zip(self): self.assertTrue(Login.login(c)) # no contest_id given - response = c.get(f"/api/v2/admin/users/zip/steps") + response = c.get("/api/v2/admin/users/zip/steps") self.assertEqual(response.status_code, 404) # Parse the response response_data = response.json() diff --git a/home/tests/integration/apiv2/views/web/__init__.py b/home/tests/integration/apiv2/views/web/__init__.py index 8b137891..e69de29b 100644 --- a/home/tests/integration/apiv2/views/web/__init__.py +++ b/home/tests/integration/apiv2/views/web/__init__.py @@ -1 +0,0 @@ - diff --git a/home/tests/integration/apiv2/weeklygoal/test_create.py b/home/tests/integration/apiv2/weeklygoal/test_create.py index b1303757..eb67f406 100644 --- a/home/tests/integration/apiv2/weeklygoal/test_create.py +++ b/home/tests/integration/apiv2/weeklygoal/test_create.py @@ -1,7 +1,5 @@ from django.test import Client, TestCase -from home.models import Device - class ApiTestCase(TestCase): def setUp(self): diff --git a/home/views/apiv2/admin.py b/home/views/apiv2/admin.py index 87bb69c1..e4055f08 100644 --- a/home/views/apiv2/admin.py +++ b/home/views/apiv2/admin.py @@ -7,16 +7,13 @@ from django.db import connection from django.db.models import CharField, Count, Q, Sum, Value from django.db.models.functions import Concat, TruncDate -from django.http import HttpRequest, HttpResponse, JsonResponse +from django.http import HttpRequest, HttpResponse from 
django.views.decorators.csrf import csrf_exempt from ninja import Query, Router from ninja.errors import HttpError, ValidationError from ninja.security import django_auth_superuser from home.models import Account, Contest, DailyWalk -from home.views.api.serializers.response_serializers import ( - GetUsersRespSerializer, -) from home.views.api.utils import paginate from .histogram.histogram import Histogram @@ -317,7 +314,7 @@ def get_results_walks_cumulative( response={200: List, 404: ErrorSchema}, auth=django_auth_superuser, ) -def get_walks_steps_daily(request, qs: Query[HomeGraphFilter]): +def get_walks_steps_cumulative(request, qs: Query[HomeGraphFilter]): start_date, end_date = get_contest_start_end(qs) results = get_results_walks_cumulative( start_date, @@ -340,7 +337,7 @@ def get_walks_steps_daily(request, qs: Query[HomeGraphFilter]): response={200: List, 404: ErrorSchema}, auth=django_auth_superuser, ) -def get_walks_steps_daily(request, qs: Query[HomeGraphFilter]): +def get_walks_distance_cumulative(request, qs: Query[HomeGraphFilter]): start_date, end_date = get_contest_start_end(qs) results = get_results_walks_cumulative( start_date, @@ -367,8 +364,6 @@ def get_walks_steps_daily(request, qs: Query[HomeGraphFilter]): def get_users( request: HttpRequest, response: HttpResponse, qs: Query[UsersInSchema] ): - - contest_id = qs.contest_id filters = qs.filter_dict["filters"] order_by = qs.filter_dict["order_by"] page = qs.filter_dict["page"] diff --git a/home/views/apiv2/appuser.py b/home/views/apiv2/appuser.py index d0de2d6f..8b50bf17 100644 --- a/home/views/apiv2/appuser.py +++ b/home/views/apiv2/appuser.py @@ -85,7 +85,7 @@ def create_appuser(request, payload: AccountSchema): @router.put("/{account_id}", response={204: None, 404: ErrorSchema}) @csrf_exempt -def update_appuser(request, account_id: str, payload: AccountSchema): +def update_appuser_put(request, account_id: str, payload: AccountSchema): json_data = payload.dict() try: device = Device.objects.get(device_id=account_id) @@ -104,7 +104,9 @@ def update_appuser(request, account_id: str, payload: AccountSchema): @router.patch("/{account_id}", response={204: None, 404: ErrorSchema}) @csrf_exempt -def update_appuser(request, account_id: str, payload: AccountPatchSchema): +def update_appuser_patch( + request, account_id: str, payload: AccountPatchSchema +): json_data = payload.dict() try: device = Device.objects.get(device_id=account_id) diff --git a/home/views/apiv2/export.py b/home/views/apiv2/export.py index 6f51818d..3e33ecb6 100644 --- a/home/views/apiv2/export.py +++ b/home/views/apiv2/export.py @@ -6,7 +6,6 @@ from django.db.models import BooleanField, Count, ExpressionWrapper, Q, Sum from django.http import FileResponse, HttpResponse -from django.views import View from django.views.decorators.csrf import csrf_exempt from ninja import Router diff --git a/home/views/apiv2/schemas/admin.py b/home/views/apiv2/schemas/admin.py index 798bfc0a..e5f78bfa 100644 --- a/home/views/apiv2/schemas/admin.py +++ b/home/views/apiv2/schemas/admin.py @@ -1,16 +1,8 @@ from datetime import date, timedelta -from typing import Dict, List, Optional - -from django.db.models import ( - BooleanField, - Count, - ExpressionWrapper, - F, - Q, - QuerySet, - Sum, -) -from ninja import Field, FilterSchema, ModelSchema, Schema +from typing import List + +from django.db.models import BooleanField, Count, ExpressionWrapper, F, Q, Sum +from ninja import Field, ModelSchema, Schema from ninja.errors import ValidationError from pydantic import computed_field, 
field_validator, model_validator from typing_extensions import Self @@ -39,21 +31,21 @@ class AdminHomeSchema(Schema): class HomeGraphFilter(Schema): contest_id: str = Field( default=None, - description="""The ID of the contest to filter by. -
This field is mutually exclusive with the date fields. -
For distance and step metrics, this will restrict the records - to the values recorded during the contest period's start and end date. -
For account metrics, this will restrict the records to the accounts that participated in the contest.""", + description="""The ID of the contest to filter by.\n + This field is mutually exclusive with the date fields.\n + For distance and step metrics, this will restrict the records + to the values recorded during the contest period's start and end date.\n + For account metrics, this will restrict the records to the accounts that participated in the contest.""", ) start_date: date = Field( default=None, - description="""The start date to filter the records by. -

**Note** start_date and end_date areThese fields are mutually exclusive with the contest_id field.""", + description="""The start date to filter the records by.\n\n + **Note** start_date and end_date are mutually exclusive with the contest_id field.""", ) end_date: date = Field( default=None, - description="""The end date to filter the records by. -

**Note** start_date and end_date are mutually exclusive with the contest_id field.""", + description="""The end date to filter the records by.\n\n + **Note** start_date and end_date are mutually exclusive with the contest_id field.""", ) is_tester: bool = Field( default=False, @@ -83,7 +75,7 @@ class UsersInSchema(Schema): contest_id: str = Field( default=None, - description="""The ID of the contest to filter by. + description="""The ID of the contest to filter by. Providing this also will add additional metrics related to the contest.""", ) is_tester: bool = Field( @@ -92,7 +84,7 @@ class UsersInSchema(Schema): ) order_by: str = Field( default=None, - description="""The field to order the results by. Prefix with '-' to order in descending order. + description="""The field to order the results by. Prefix with '-' to order in descending order. The secondary sort and default sort will be lexicographically, the 'name'.""", ) page: int = Field( @@ -267,11 +259,11 @@ class HistogramInSchema(Schema): field: str = Field(description="The field to group by") contest_id: str = Field( default=None, - description="""The ID of the contest to filter by. -
This field is mutually exclusive with the date fields. -
For distance and step metrics, this will restrict the records - to the values recorded during the contest period's start and end date. -
For account metrics, this will restrict the records to the accounts that participated in the contest.""", + description="""The ID of the contest to filter by.\n + This field is mutually exclusive with the date fields.\n + For distance and step metrics, this will restrict the records + to the values recorded during the contest period's start and end date.\n + For account metrics, this will restrict the records to the accounts that participated in the contest.""", ) is_tester: bool = Field( default=False, @@ -284,27 +276,27 @@ class HistogramInSchema(Schema): ) bin_count: int = Field( default=None, - description="""The number of bins to group the data by. -

**Note** this is mutually exclusive with the bin_size and bin_custom field.""", + description="""The number of bins to group the data by.\n\n + **Note** this is mutually exclusive with the bin_size and bin_custom field.""", ) bin_custom: str = Field( default=None, - description="""A list of comma separated custom bin sizes in increasing order to group the data by. -

Example: 0,18,29,44,59 -

**Note** this is mutually exclusive with the bin_size and bin_count fields.""", + description="""A list of comma separated custom bin sizes in increasing order to group the data by.\n + Example: 0,18,29,44,59\n\n + **Note** this is mutually exclusive with the bin_size and bin_count fields.""", ) # Date fields to filter by, inclusive. # These fields are mutually exclusive with the contest_id field. start_date: date = Field( default=None, - description="""The start date to filter the records by. -

**Note** start_date and end_date areThese fields are mutually exclusive with the contest_id field.""", + description="""The start date to filter the records by.\n\n + **Note** start_date and end_date are mutually exclusive with the contest_id field.""", ) end_date: date = Field( default=None, - description="""The end date to filter the records by. -

**Note** start_date and end_date are mutually exclusive with the contest_id field.""", + description="""The end date to filter the records by.\n\n + **Note** start_date and end_date are mutually exclusive with the contest_id field.""", ) @field_validator("bin_custom") @@ -382,19 +374,19 @@ class HistogramOutSchema(Schema): data: List[Bins] bin_size: int = Field( default=None, - description="""The size of the bin to group the data by. Units will be the same as the field. -

**Note** this is mutually exclusive with the bin_count and bin_custom field.""", + description="""The size of the bin to group the data by. Units will be the same as the field.\n\n + **Note** this is mutually exclusive with the bin_count and bin_custom field.""", ) bin_count: int = Field( default=None, - description="""The number of bins to group the data by. -

**Note** this is mutually exclusive with the bin_size and bin_custom field.""", + description="""The number of bins to group the data by.\n\n + **Note** this is mutually exclusive with the bin_size and bin_custom field.""", ) bin_custom: List[int] | None = Field( default=None, - description="""A list of comma separated custom bin sizes in increasing order to group the data by. -

Example: 0,18,29,44,59 -

Note this is mutually exclusive with the bin_size and bin_count fields.""", + description="""A list of comma separated custom bin sizes in increasing order to group the data by.\n\n + Example: 0,18,29,44,59\n + Note this is mutually exclusive with the bin_size and bin_count fields.""", ) unit: str = Field(description="The unit of measurement for the data") diff --git a/home/views/apiv2/schemas/device.py b/home/views/apiv2/schemas/device.py index 477530e0..df18ce67 100644 --- a/home/views/apiv2/schemas/device.py +++ b/home/views/apiv2/schemas/device.py @@ -1,8 +1,5 @@ -from datetime import datetime - from ninja import Field, Schema from pydantic import ConfigDict -from typing_extensions import Self class DeviceInSchema(Schema): @@ -10,48 +7,48 @@ class DeviceInSchema(Schema): device_id: str = Field( max_length=250, - description="""A unique id generated by the app when it is first installed" + description="""A unique id generated by the app when it is first installed on a device. Used for authentication on subsequent calls.""", ) device_model: str | None = Field( default=None, max_length=25, - description="""Unique identifier for the device's model. -
iOS: "iPhone7,2" -
Android: "goldfish" -
Windows: "Y3R94UC#AC4" + description="""Unique identifier for the device's model.\n + \ngetDeviceid() - Gets the device ID.\n + \tiOS: "iPhone7,2"\n + \tAndroid: "goldfish"\n + \tWindows: "Y3R94UC#AC4" """, ) manufacturer: str | None = Field( default=None, max_length=25, - description="""Manufacturer of the device. -

getManufacturer() - Gets the device manufacturer -
iOS: "Apple" -
Android: "Google" -
Windows: ? + description="""Manufacturer of the device.\n + \ngetManufacturer() - Gets the device manufacturer\n + \tiOS: "Apple"\n + \tAndroid: "Google"\n + \tWindows: ? """, ) os_name: str | None = Field( default=None, max_length=25, - description="""Operating system of the device. -

getSystemName() - Gets the device OS name. -
iOS: "iOS" on newer iOS devices "iPhone OS" on older devices - (including older iPad models), "iPadOS" for iPads using iPadOS 15.0 or higher. -
Android: "Android" -
Windows: ? + description="""Operating system of the device.\n + \ngetSystemName() - Gets the device OS name.\n + \tiOS: "iOS" on newer iOS devices "iPhone OS" on older devices\n + \t(including older iPad models), "iPadOS" for iPads using iPadOS 15.0 or higher.\n + \tAndroid: "Android"\n + \tWindows: ? """, ) os_version: str | None = Field( default=None, max_length=25, - description="""Device operating system version. -

getSystemVersion() - Gets the device OS version. -
iOS: "11.0" -
Android: "7.1.1" -
Windows: ? + description="""Device operating system version.\n + \ngetSystemVersion() - Gets the device OS version.\n + \tiOS: "11.0"\n + \tAndroid: "7.1.1"\n + \tWindows: ? """, ) diff --git a/home/views/apiv2/schemas/intentionalwalk.py b/home/views/apiv2/schemas/intentionalwalk.py index 111082a2..c4c46b5f 100644 --- a/home/views/apiv2/schemas/intentionalwalk.py +++ b/home/views/apiv2/schemas/intentionalwalk.py @@ -1,5 +1,4 @@ from datetime import datetime -from typing import List, Optional from ninja import Field, Schema from pydantic import ConfigDict, computed_field diff --git a/home/views/apiv2/schemas/leaderboard.py b/home/views/apiv2/schemas/leaderboard.py index a4ecb581..e3d4062e 100644 --- a/home/views/apiv2/schemas/leaderboard.py +++ b/home/views/apiv2/schemas/leaderboard.py @@ -1,8 +1,5 @@ -from typing import Optional - from ninja import Field, Schema from pydantic import ConfigDict -from typing_extensions import Self class LeaderboardUserSchema(Schema): From fe4d11aac5adf77cda2d5783115319dafad2ecb4 Mon Sep 17 00:00:00 2001 From: Patrick Yu Date: Fri, 13 Sep 2024 17:31:43 -0700 Subject: [PATCH 8/9] Bump Python version from 3.11.4 to 3.11.10 --- Dockerfile | 2 +- runtime.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 752000ed..fc9d7f92 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.11.9-bookworm +FROM python:3.11.10-bookworm ENV PATH="/root/.local/bin:${PATH}" # Install postgres client diff --git a/runtime.txt b/runtime.txt index 431fc7e8..e3451955 100644 --- a/runtime.txt +++ b/runtime.txt @@ -1 +1 @@ -python-3.11.4 +python-3.11.10 From 0268fdebb6335bd149830b1ea5e16641648dd2cb Mon Sep 17 00:00:00 2001 From: Patrick Yu Date: Sun, 15 Sep 2024 13:08:17 -0700 Subject: [PATCH 9/9] Added test user check updates to APIv2 --- .../integration/apiv2/appuser/test_create.py | 7 +++++-- home/tests/unit/apiv2/api/test_appuser.py | 17 +++++++++-------- home/views/apiv2/appuser.py | 9 +++++---- 3 files changed, 19 insertions(+), 14 deletions(-) diff --git a/home/tests/integration/apiv2/appuser/test_create.py b/home/tests/integration/apiv2/appuser/test_create.py index b0e5df06..f5591671 100644 --- a/home/tests/integration/apiv2/appuser/test_create.py +++ b/home/tests/integration/apiv2/appuser/test_create.py @@ -53,14 +53,17 @@ def test_create_tester_appuser_success(self): request_params = self.request_params.copy() request_params.update( { - "name": "Tester John", + "name": "IWT John", + "zip": "94105", } ) expected_response = self.expected_response.copy() expected_response.update( { - "name": "Tester John", + "name": "IWT John", + "zip": "94105", "is_tester": True, + "is_sf_resident": True, } ) diff --git a/home/tests/unit/apiv2/api/test_appuser.py b/home/tests/unit/apiv2/api/test_appuser.py index 78e55afe..491715f1 100644 --- a/home/tests/unit/apiv2/api/test_appuser.py +++ b/home/tests/unit/apiv2/api/test_appuser.py @@ -6,14 +6,15 @@ class TestIsTester(TestCase): def test_is_tester(self): examples = [ - ("Tester A", True), - ("Test B", False), # are we sure this is the behavior we want? - ("tester c", True), - ("Testerosa", False), - ("tester-d", True), - ("Tester_E", True), - ("testrata", False), - ("tester", False), # are we sure this is the behavior we want?
+ ("Iwt A", True), + ("Test B", False), + ("iwt c", True), + ("John Iwt", True), + ("Iwterosa", False), + ("iwt-d", False), + ("Iwt_E", False), + ("iwtrata", False), + ("iwt", True), ] for example, expected in examples: self.assertEqual( diff --git a/home/views/apiv2/appuser.py b/home/views/apiv2/appuser.py index 8b50bf17..6866d5fa 100644 --- a/home/views/apiv2/appuser.py +++ b/home/views/apiv2/appuser.py @@ -15,10 +15,11 @@ # Determines whether Account is tester account, based on name prefix def is_tester(name_field: str) -> bool: - possible_prefixes = ["tester-", "tester ", "tester_"] - return any( - [name_field.lower().startswith(prefix) for prefix in possible_prefixes] - ) + parts = name_field.split(" ", 1) + for s in parts: + if "iwt" == s.lower(): + return True + return False def update_model(account: Account, json_data: dict):